Dec 06 15:32:00 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 06 15:32:00 crc restorecon[4684]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 06 15:32:00 crc restorecon[4684]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:00 crc restorecon[4684]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 06 15:32:00 crc 
restorecon[4684]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 06 15:32:00 crc restorecon[4684]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 06 15:32:00 crc restorecon[4684]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 06 15:32:00 crc 
restorecon[4684]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc 
restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc 
restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 06 15:32:00 
crc restorecon[4684]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 06 
15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 15:32:00 crc restorecon[4684]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 06 15:32:00 crc restorecon[4684]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 06 15:32:00 crc restorecon[4684]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 15:32:00 crc restorecon[4684]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 15:32:00 crc restorecon[4684]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:00 crc restorecon[4684]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:00 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to
system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 
15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 15:32:01 crc 
restorecon[4684]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 
15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 
15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 15:32:01 crc restorecon[4684]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 06 15:32:01 crc restorecon[4684]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 06 15:32:01 crc restorecon[4684]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0
Dec 06 15:32:01 crc kubenswrapper[5003]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 06 15:32:01 crc kubenswrapper[5003]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version.
Dec 06 15:32:01 crc kubenswrapper[5003]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 06 15:32:01 crc kubenswrapper[5003]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 06 15:32:01 crc kubenswrapper[5003]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Dec 06 15:32:01 crc kubenswrapper[5003]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
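The deprecation warnings above all point at the same migration: these kubelet CLI flags belong in the KubeletConfiguration file passed via --config. As a minimal sketch of the equivalent config, the Go program below uses k8s.io/kubelet/config/v1beta1 and sigs.k8s.io/yaml to emit such a file; every field value is illustrative only and is not taken from this log.

    package main

    import (
    	"fmt"

    	corev1 "k8s.io/api/core/v1"
    	kubeletv1beta1 "k8s.io/kubelet/config/v1beta1"
    	"sigs.k8s.io/yaml"
    )

    func main() {
    	var cfg kubeletv1beta1.KubeletConfiguration
    	cfg.Kind = "KubeletConfiguration"
    	cfg.APIVersion = "kubelet.config.k8s.io/v1beta1"

    	// --container-runtime-endpoint -> containerRuntimeEndpoint (assumed CRI-O socket)
    	cfg.ContainerRuntimeEndpoint = "unix:///var/run/crio/crio.sock"
    	// --volume-plugin-dir -> volumePluginDir (illustrative path)
    	cfg.VolumePluginDir = "/etc/kubernetes/kubelet-plugins/volume/exec"
    	// --register-with-taints -> registerWithTaints (illustrative taint)
    	cfg.RegisterWithTaints = []corev1.Taint{
    		{Key: "node-role.kubernetes.io/master", Effect: corev1.TaintEffectNoSchedule},
    	}
    	// --system-reserved -> systemReserved (illustrative reservations)
    	cfg.SystemReserved = map[string]string{"cpu": "500m", "memory": "1Gi"}

    	// Marshal the struct into the YAML the kubelet reads via --config.
    	out, err := yaml.Marshal(&cfg)
    	if err != nil {
    		panic(err)
    	}
    	fmt.Print(string(out))
    }

Note that --minimum-container-ttl-duration and --pod-infra-container-image have no single-field equivalent; per the warnings above, the replacements are the eviction settings and the runtime's own sandbox-image configuration.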
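The long run of "unrecognized feature gate" warnings that follows comes from gate names the kubelet's feature-gate registry does not know; most of the names look OpenShift-scoped (e.g. GatewayAPI, NewOLM) rather than Kubernetes kubelet gates, and the W lines show they are logged as warnings while startup continues. A sketch of the underlying mechanism, using k8s.io/component-base/featuregate with a made-up gate name ("ExampleGate"), reproduces the same wording:

    package main

    import (
    	"fmt"

    	"k8s.io/component-base/featuregate"
    )

    func main() {
    	// Build a toy registry that knows exactly one gate.
    	fg := featuregate.NewFeatureGate()
    	if err := fg.Add(map[featuregate.Feature]featuregate.FeatureSpec{
    		"ExampleGate": {Default: false, PreRelease: featuregate.Alpha},
    	}); err != nil {
    		panic(err)
    	}
    	// Setting a known gate succeeds; a name outside the registry fails
    	// with the same "unrecognized feature gate" wording seen in the log.
    	if err := fg.SetFromMap(map[string]bool{"ExampleGate": true, "GatewayAPI": true}); err != nil {
    		fmt.Println(err) // unrecognized feature gate: GatewayAPI
    	}
    }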
15:32:01.580242 5003 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580247 5003 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580252 5003 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580256 5003 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580261 5003 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580266 5003 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580270 5003 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580274 5003 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580279 5003 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580285 5003 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580291 5003 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580295 5003 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580300 5003 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580304 5003 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580309 5003 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580313 5003 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580318 5003 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580324 5003 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580329 5003 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580333 5003 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580338 5003 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580342 5003 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580347 5003 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580352 5003 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580358 5003 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580363 5003 feature_gate.go:330] unrecognized feature 
gate: IngressControllerDynamicConfigurationManager Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580367 5003 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580372 5003 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580376 5003 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580380 5003 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580385 5003 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580390 5003 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580394 5003 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580398 5003 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580404 5003 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580410 5003 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580415 5003 feature_gate.go:330] unrecognized feature gate: Example Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580420 5003 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580424 5003 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580429 5003 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580433 5003 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580438 5003 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580442 5003 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580447 5003 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580451 5003 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580456 5003 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580461 5003 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580465 5003 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580470 5003 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580474 5003 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.580480 5003 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 06 
15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580585 5003 flags.go:64] FLAG: --address="0.0.0.0" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580596 5003 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580606 5003 flags.go:64] FLAG: --anonymous-auth="true" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580613 5003 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580619 5003 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580625 5003 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580632 5003 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580639 5003 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580645 5003 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580650 5003 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580656 5003 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580661 5003 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580667 5003 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580672 5003 flags.go:64] FLAG: --cgroup-root="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580676 5003 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580682 5003 flags.go:64] FLAG: --client-ca-file="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580686 5003 flags.go:64] FLAG: --cloud-config="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580691 5003 flags.go:64] FLAG: --cloud-provider="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580696 5003 flags.go:64] FLAG: --cluster-dns="[]" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580702 5003 flags.go:64] FLAG: --cluster-domain="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580706 5003 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580712 5003 flags.go:64] FLAG: --config-dir="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580716 5003 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580722 5003 flags.go:64] FLAG: --container-log-max-files="5" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580729 5003 flags.go:64] FLAG: --container-log-max-size="10Mi" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580734 5003 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580738 5003 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580744 5003 flags.go:64] FLAG: --containerd-namespace="k8s.io" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580749 5003 flags.go:64] FLAG: --contention-profiling="false" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 
15:32:01.580754 5003 flags.go:64] FLAG: --cpu-cfs-quota="true" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580761 5003 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580767 5003 flags.go:64] FLAG: --cpu-manager-policy="none" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580772 5003 flags.go:64] FLAG: --cpu-manager-policy-options="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580779 5003 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580784 5003 flags.go:64] FLAG: --enable-controller-attach-detach="true" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580790 5003 flags.go:64] FLAG: --enable-debugging-handlers="true" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580795 5003 flags.go:64] FLAG: --enable-load-reader="false" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580800 5003 flags.go:64] FLAG: --enable-server="true" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580805 5003 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580813 5003 flags.go:64] FLAG: --event-burst="100" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580818 5003 flags.go:64] FLAG: --event-qps="50" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580824 5003 flags.go:64] FLAG: --event-storage-age-limit="default=0" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580829 5003 flags.go:64] FLAG: --event-storage-event-limit="default=0" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580834 5003 flags.go:64] FLAG: --eviction-hard="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580842 5003 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580847 5003 flags.go:64] FLAG: --eviction-minimum-reclaim="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580852 5003 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580858 5003 flags.go:64] FLAG: --eviction-soft="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580863 5003 flags.go:64] FLAG: --eviction-soft-grace-period="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580869 5003 flags.go:64] FLAG: --exit-on-lock-contention="false" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580874 5003 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580880 5003 flags.go:64] FLAG: --experimental-mounter-path="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580885 5003 flags.go:64] FLAG: --fail-cgroupv1="false" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580890 5003 flags.go:64] FLAG: --fail-swap-on="true" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580896 5003 flags.go:64] FLAG: --feature-gates="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580902 5003 flags.go:64] FLAG: --file-check-frequency="20s" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580908 5003 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580913 5003 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580919 5003 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 
15:32:01.580924 5003 flags.go:64] FLAG: --healthz-port="10248" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580930 5003 flags.go:64] FLAG: --help="false" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580935 5003 flags.go:64] FLAG: --hostname-override="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580941 5003 flags.go:64] FLAG: --housekeeping-interval="10s" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580946 5003 flags.go:64] FLAG: --http-check-frequency="20s" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580952 5003 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580957 5003 flags.go:64] FLAG: --image-credential-provider-config="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580962 5003 flags.go:64] FLAG: --image-gc-high-threshold="85" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580968 5003 flags.go:64] FLAG: --image-gc-low-threshold="80" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580973 5003 flags.go:64] FLAG: --image-service-endpoint="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580978 5003 flags.go:64] FLAG: --kernel-memcg-notification="false" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580983 5003 flags.go:64] FLAG: --kube-api-burst="100" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580988 5003 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580994 5003 flags.go:64] FLAG: --kube-api-qps="50" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.580999 5003 flags.go:64] FLAG: --kube-reserved="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581005 5003 flags.go:64] FLAG: --kube-reserved-cgroup="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581026 5003 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581033 5003 flags.go:64] FLAG: --kubelet-cgroups="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581038 5003 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581043 5003 flags.go:64] FLAG: --lock-file="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581049 5003 flags.go:64] FLAG: --log-cadvisor-usage="false" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581055 5003 flags.go:64] FLAG: --log-flush-frequency="5s" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581061 5003 flags.go:64] FLAG: --log-json-info-buffer-size="0" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581069 5003 flags.go:64] FLAG: --log-json-split-stream="false" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581075 5003 flags.go:64] FLAG: --log-text-info-buffer-size="0" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581080 5003 flags.go:64] FLAG: --log-text-split-stream="false" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581085 5003 flags.go:64] FLAG: --logging-format="text" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581090 5003 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581096 5003 flags.go:64] FLAG: --make-iptables-util-chains="true" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581101 5003 flags.go:64] FLAG: --manifest-url="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581105 5003 
flags.go:64] FLAG: --manifest-url-header="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581112 5003 flags.go:64] FLAG: --max-housekeeping-interval="15s" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581118 5003 flags.go:64] FLAG: --max-open-files="1000000" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581125 5003 flags.go:64] FLAG: --max-pods="110" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581130 5003 flags.go:64] FLAG: --maximum-dead-containers="-1" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581136 5003 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581141 5003 flags.go:64] FLAG: --memory-manager-policy="None" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581146 5003 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581152 5003 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581157 5003 flags.go:64] FLAG: --node-ip="192.168.126.11" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581162 5003 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581174 5003 flags.go:64] FLAG: --node-status-max-images="50" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581180 5003 flags.go:64] FLAG: --node-status-update-frequency="10s" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581185 5003 flags.go:64] FLAG: --oom-score-adj="-999" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581190 5003 flags.go:64] FLAG: --pod-cidr="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581196 5003 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581204 5003 flags.go:64] FLAG: --pod-manifest-path="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581210 5003 flags.go:64] FLAG: --pod-max-pids="-1" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581215 5003 flags.go:64] FLAG: --pods-per-core="0" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581220 5003 flags.go:64] FLAG: --port="10250" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581225 5003 flags.go:64] FLAG: --protect-kernel-defaults="false" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581230 5003 flags.go:64] FLAG: --provider-id="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581236 5003 flags.go:64] FLAG: --qos-reserved="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581241 5003 flags.go:64] FLAG: --read-only-port="10255" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581246 5003 flags.go:64] FLAG: --register-node="true" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581251 5003 flags.go:64] FLAG: --register-schedulable="true" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581256 5003 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581266 5003 flags.go:64] FLAG: --registry-burst="10" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581271 5003 flags.go:64] FLAG: --registry-qps="5" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581277 5003 flags.go:64] 
FLAG: --reserved-cpus="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581282 5003 flags.go:64] FLAG: --reserved-memory="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581288 5003 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581293 5003 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581299 5003 flags.go:64] FLAG: --rotate-certificates="false" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581304 5003 flags.go:64] FLAG: --rotate-server-certificates="false" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581309 5003 flags.go:64] FLAG: --runonce="false" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581314 5003 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581320 5003 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581326 5003 flags.go:64] FLAG: --seccomp-default="false" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581331 5003 flags.go:64] FLAG: --serialize-image-pulls="true" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581336 5003 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581341 5003 flags.go:64] FLAG: --storage-driver-db="cadvisor" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581347 5003 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581352 5003 flags.go:64] FLAG: --storage-driver-password="root" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581357 5003 flags.go:64] FLAG: --storage-driver-secure="false" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581363 5003 flags.go:64] FLAG: --storage-driver-table="stats" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581368 5003 flags.go:64] FLAG: --storage-driver-user="root" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581373 5003 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581378 5003 flags.go:64] FLAG: --sync-frequency="1m0s" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581384 5003 flags.go:64] FLAG: --system-cgroups="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581389 5003 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581397 5003 flags.go:64] FLAG: --system-reserved-cgroup="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581402 5003 flags.go:64] FLAG: --tls-cert-file="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581407 5003 flags.go:64] FLAG: --tls-cipher-suites="[]" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581413 5003 flags.go:64] FLAG: --tls-min-version="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581418 5003 flags.go:64] FLAG: --tls-private-key-file="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581423 5003 flags.go:64] FLAG: --topology-manager-policy="none" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581429 5003 flags.go:64] FLAG: --topology-manager-policy-options="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581434 5003 flags.go:64] FLAG: --topology-manager-scope="container" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581439 5003 flags.go:64] 
FLAG: --v="2" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581450 5003 flags.go:64] FLAG: --version="false" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581457 5003 flags.go:64] FLAG: --vmodule="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581463 5003 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.581470 5003 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581616 5003 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581625 5003 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581631 5003 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581636 5003 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581641 5003 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581646 5003 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581652 5003 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581657 5003 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581662 5003 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581667 5003 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581671 5003 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581676 5003 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581681 5003 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581685 5003 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581690 5003 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581694 5003 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581699 5003 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581703 5003 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581708 5003 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581712 5003 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581717 5003 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581721 5003 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 
15:32:01.581726 5003 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581730 5003 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581735 5003 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581739 5003 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581744 5003 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581748 5003 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581753 5003 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581757 5003 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581763 5003 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581769 5003 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581774 5003 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581779 5003 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581784 5003 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581790 5003 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581797 5003 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581803 5003 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581808 5003 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581813 5003 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581818 5003 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581822 5003 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581827 5003 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581832 5003 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581837 5003 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581843 5003 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581848 5003 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581854 5003 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581859 5003 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581864 5003 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581869 5003 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581873 5003 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581878 5003 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581882 5003 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581887 5003 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581898 5003 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581903 5003 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581909 5003 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581918 5003 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581923 5003 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581927 5003 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581932 5003 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581936 5003 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581941 5003 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581946 5003 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581950 5003 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581954 5003 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581959 5003 feature_gate.go:330] unrecognized feature gate: Example Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581963 5003 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581968 5003 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.581972 5003 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.582132 5003 
feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.591736 5003 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.591777 5003 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.591847 5003 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.591857 5003 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.591864 5003 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.591869 5003 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.591874 5003 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.591879 5003 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.591883 5003 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.591887 5003 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.591892 5003 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.591896 5003 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.591899 5003 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.591903 5003 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.591906 5003 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.591910 5003 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.591914 5003 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.591919 5003 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.591924 5003 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.591927 5003 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.591931 5003 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.591935 5003 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 06 
15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.591939 5003 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.591944 5003 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.591948 5003 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.591952 5003 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.591957 5003 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.591962 5003 feature_gate.go:330] unrecognized feature gate: Example Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.591997 5003 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592002 5003 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592007 5003 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592013 5003 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592018 5003 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592022 5003 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592027 5003 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592031 5003 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592036 5003 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592041 5003 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592046 5003 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592050 5003 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592054 5003 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592058 5003 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592062 5003 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592065 5003 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592069 5003 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592073 5003 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592076 5003 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592081 5003 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 06 15:32:01 crc 
kubenswrapper[5003]: W1206 15:32:01.592084 5003 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592088 5003 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592091 5003 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592095 5003 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592099 5003 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592103 5003 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592106 5003 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592111 5003 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592116 5003 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592120 5003 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592125 5003 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592130 5003 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592134 5003 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592138 5003 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592142 5003 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592148 5003 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592152 5003 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592156 5003 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592160 5003 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592165 5003 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592171 5003 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592175 5003 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592180 5003 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592183 5003 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592187 5003 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.592194 5003 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592321 5003 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592329 5003 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592333 5003 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592337 5003 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592341 5003 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592345 5003 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592348 5003 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592352 5003 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592355 5003 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592359 5003 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592362 5003 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592365 5003 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592369 5003 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592372 5003 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592376 5003 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592381 5003 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
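
[editor's sketch] Each of the warning floods above is the kubelet re-resolving its effective feature gates, and each pass ends in the same feature_gate.go:386 map. A minimal sketch, assuming k8s.io/component-base/featuregate and illustrative gate specs (not the kubelet's real definitions), of how such a map is registered and overridden, and how an unknown name hits the "unrecognized feature gate" path. Upstream SetFromMap returns an error for unknown gates, so the warn-and-continue behaviour logged here is evidently a downstream carry in the OpenShift kubelet wrapper.

package main

import (
	"fmt"

	"k8s.io/component-base/featuregate"
)

func main() {
	fg := featuregate.NewFeatureGate()
	// Register known gates with their defaults and maturity (illustrative specs).
	if err := fg.Add(map[featuregate.Feature]featuregate.FeatureSpec{
		"CloudDualStackNodeIPs": {Default: false, PreRelease: featuregate.GA},
		"KMSv1":                 {Default: false, PreRelease: featuregate.Deprecated},
	}); err != nil {
		panic(err)
	}
	// Overriding GA/deprecated gates succeeds but logs the
	// "Setting ... feature gate" warnings seen above.
	if err := fg.SetFromMap(map[string]bool{"CloudDualStackNodeIPs": true, "KMSv1": true}); err != nil {
		panic(err)
	}
	// An unregistered name is rejected with "unrecognized feature gate: ...".
	if err := fg.SetFromMap(map[string]bool{"GatewayAPI": true}); err != nil {
		fmt.Println("rejected:", err)
	}
	fmt.Println("CloudDualStackNodeIPs enabled:", fg.Enabled("CloudDualStackNodeIPs"))
}

[end sketch]
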
Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592386 5003 feature_gate.go:330] unrecognized feature gate: Example Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592390 5003 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592395 5003 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592399 5003 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592403 5003 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592406 5003 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592410 5003 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592415 5003 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592420 5003 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592424 5003 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592428 5003 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592432 5003 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592437 5003 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592440 5003 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592445 5003 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592449 5003 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592452 5003 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592456 5003 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592459 5003 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592463 5003 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592467 5003 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592655 5003 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592659 5003 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592663 5003 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592667 5003 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 
15:32:01.592670 5003 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592674 5003 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592678 5003 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592683 5003 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592688 5003 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592692 5003 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592696 5003 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592701 5003 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592704 5003 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592709 5003 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592712 5003 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592716 5003 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592720 5003 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592724 5003 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592727 5003 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592732 5003 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592736 5003 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592739 5003 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592743 5003 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592747 5003 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592750 5003 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592754 5003 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592757 5003 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592761 5003 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592764 5003 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 06 15:32:01 crc 
kubenswrapper[5003]: W1206 15:32:01.592768 5003 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592772 5003 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592775 5003 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592779 5003 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.592782 5003 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.592789 5003 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.592962 5003 server.go:940] "Client rotation is on, will bootstrap in background" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.595908 5003 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.596070 5003 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.596934 5003 server.go:997] "Starting client certificate rotation" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.596966 5003 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.597268 5003 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-18 12:35:54.859536396 +0000 UTC Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.597390 5003 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.601588 5003 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 06 15:32:01 crc kubenswrapper[5003]: E1206 15:32:01.602630 5003 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.73:6443: connect: connection refused" logger="UnhandledError" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.606078 5003 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.613665 5003 log.go:25] "Validated CRI v1 runtime API" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.625408 5003 log.go:25] "Validated CRI v1 image API" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.626715 5003 server.go:1437] "Using cgroup driver 
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.626715 5003 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.628551 5003 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-06-15-28-18-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.628631 5003 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.646281 5003 manager.go:217] Machine: {Timestamp:2025-12-06 15:32:01.644620744 +0000 UTC m=+0.177975205 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2799998 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:42b9e6db-6fd4-4e84-a5b7-176f28bfe2f1 BootID:010ec561-c8bb-454b-ab74-658add58caba Filesystems:[{Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:29:ed:c9 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:29:ed:c9 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:94:79:19 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:dc:26:9c Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:66:49:c5 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:a8:ef:f8 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:1a:bb:1c:4f:6b:04 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:a2:47:98:8e:3b:e9 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.646575 5003 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
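The fs.go:133/134 entries above record how cAdvisor inventories mount points and their capacities at startup. A minimal Go sketch of gathering the same kind of numbers, assuming a Linux host and using only syscall.Statfs from the standard library (the mount-point list is taken from the partitions entry above):

    package main

    import (
    	"fmt"
    	"syscall"
    )

    func main() {
    	// Mount points taken from the fs.go:134 partitions entry above (Linux-only sketch).
    	for _, mp := range []string{"/var", "/boot", "/run"} {
    		var st syscall.Statfs_t
    		if err := syscall.Statfs(mp, &st); err != nil {
    			fmt.Printf("%s: %v\n", mp, err)
    			continue
    		}
    		total := st.Blocks * uint64(st.Bsize) // capacity in bytes
    		avail := st.Bavail * uint64(st.Bsize) // bytes available to unprivileged users
    		fmt.Printf("%s: capacity=%d avail=%d inodes=%d\n", mp, total, avail, st.Files)
    	}
    }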
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.646789 5003 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.647343 5003 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.647578 5003 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.647621 5003 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.648042 5003 topology_manager.go:138] "Creating topology manager with none policy"
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.648064 5003 container_manager_linux.go:303] "Creating device plugin manager"
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.648332 5003 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.648357 5003 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.648639 5003 state_mem.go:36] "Initialized new in-memory state store"
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.648738 5003 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.649338 5003 kubelet.go:418] "Attempting to sync node with API server"
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.649360 5003 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
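The container_manager_linux.go:272 entry above dumps the node config as JSON, including the hard eviction thresholds (each either a quantity like "100Mi" or a percentage). A minimal Go sketch that decodes a fragment of that shape; the struct is a hypothetical subset written for illustration, not the kubelet's own type:

    package main

    import (
    	"encoding/json"
    	"fmt"
    )

    // Illustrative subset of the nodeConfig JSON printed above.
    type threshold struct {
    	Signal   string
    	Operator string
    	Value    struct {
    		Quantity   *string // null in the log when the threshold is percentage-based
    		Percentage float64
    	}
    }

    type nodeConfig struct {
    	SystemReserved         map[string]string
    	HardEvictionThresholds []threshold
    }

    func main() {
    	raw := `{"SystemReserved":{"cpu":"200m","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0}},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1}}]}`
    	var cfg nodeConfig
    	if err := json.Unmarshal([]byte(raw), &cfg); err != nil {
    		panic(err)
    	}
    	for _, t := range cfg.HardEvictionThresholds {
    		q := "<nil>"
    		if t.Value.Quantity != nil {
    			q = *t.Value.Quantity
    		}
    		fmt.Printf("%s %s quantity=%s pct=%g\n", t.Signal, t.Operator, q, t.Value.Percentage)
    	}
    	fmt.Println("systemReserved:", cfg.SystemReserved)
    }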
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.649387 5003 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.649403 5003 kubelet.go:324] "Adding apiserver pod source"
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.649416 5003 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.650558 5003 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.73:6443: connect: connection refused
Dec 06 15:32:01 crc kubenswrapper[5003]: E1206 15:32:01.650763 5003 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.73:6443: connect: connection refused" logger="UnhandledError"
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.650855 5003 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.651023 5003 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.73:6443: connect: connection refused
Dec 06 15:32:01 crc kubenswrapper[5003]: E1206 15:32:01.651091 5003 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.73:6443: connect: connection refused" logger="UnhandledError"
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.651149 5003 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
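The certificate_store.go:130 entries above load rotating cert/key PEM bundles, and the certificate_manager entries elsewhere in this log report their expiry and rotation deadlines. A minimal Go sketch for inspecting such a bundle yourself, assuming only the standard library and the path printed above:

    package main

    import (
    	"crypto/x509"
    	"encoding/pem"
    	"fmt"
    	"os"
    )

    func main() {
    	// Path from the certificate_store.go:130 entry above; adjust for your node.
    	data, err := os.ReadFile("/var/lib/kubelet/pki/kubelet-server-current.pem")
    	if err != nil {
    		panic(err)
    	}
    	// The file holds the certificate and key concatenated; walk each PEM
    	// block and report every CERTIFICATE found.
    	for block, rest := pem.Decode(data); block != nil; block, rest = pem.Decode(rest) {
    		if block.Type != "CERTIFICATE" {
    			continue
    		}
    		cert, err := x509.ParseCertificate(block.Bytes)
    		if err != nil {
    			panic(err)
    		}
    		fmt.Printf("subject=%s notBefore=%s notAfter=%s\n", cert.Subject, cert.NotBefore, cert.NotAfter)
    	}
    }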
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.651744 5003 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.652208 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.652228 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.652235 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.652242 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.652253 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.652260 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.652266 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.652277 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.652285 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.652293 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.652302 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.652309 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.652558 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.652929 5003 server.go:1280] "Started kubelet"
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.653585 5003 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.653776 5003 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.653900 5003 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.73:6443: connect: connection refused
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.654181 5003 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Dec 06 15:32:01 crc systemd[1]: Started Kubernetes Kubelet.
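Every "dial tcp 38.102.83.73:6443: connect: connection refused" error in this log is the same symptom: the apiserver at api-int.crc.testing:6443 is not accepting connections yet during startup. A minimal Go sketch of a reachability probe for that endpoint, assuming only the standard library:

    package main

    import (
    	"fmt"
    	"net"
    	"time"
    )

    func main() {
    	// Endpoint taken from the "connection refused" errors above.
    	addr := "api-int.crc.testing:6443"
    	conn, err := net.DialTimeout("tcp", addr, 3*time.Second)
    	if err != nil {
    		// Produces the same "dial tcp ...: connect: connection refused"
    		// error text seen in the log while the apiserver is down.
    		fmt.Println("unreachable:", err)
    		return
    	}
    	defer conn.Close()
    	fmt.Println("reachable:", addr)
    }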
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.654964 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.654991 5003 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.655384 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 18:33:08.427157306 +0000 UTC
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.655517 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 411h1m6.771645107s for next certificate rotation
Dec 06 15:32:01 crc kubenswrapper[5003]: E1206 15:32:01.656220 5003 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.656325 5003 volume_manager.go:287] "The desired_state_of_world populator starts"
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.656448 5003 volume_manager.go:289] "Starting Kubelet Volume Manager"
Dec 06 15:32:01 crc kubenswrapper[5003]: E1206 15:32:01.657522 5003 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.73:6443: connect: connection refused" interval="200ms"
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.657979 5003 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Dec 06 15:32:01 crc kubenswrapper[5003]: E1206 15:32:01.659568 5003 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.73:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187eaa177235b572 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-06 15:32:01.65290533 +0000 UTC m=+0.186259711,LastTimestamp:2025-12-06 15:32:01.65290533 +0000 UTC m=+0.186259711,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.662608 5003 factory.go:55] Registering systemd factory
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.662769 5003 factory.go:221] Registration of the systemd container factory successfully
Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.662589 5003 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.73:6443: connect: connection refused
Dec 06 15:32:01 crc kubenswrapper[5003]: E1206 15:32:01.662971 5003 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.73:6443: connect: connection refused" logger="UnhandledError"
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.663391 5003 factory.go:153] Registering CRI-O factory
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.663413 5003 factory.go:221] Registration of the crio container factory successfully
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.663511 5003 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.663538 5003 factory.go:103] Registering Raw factory
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.663564 5003 manager.go:1196] Started watching for new ooms in manager
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.664202 5003 manager.go:319] Starting recovery of all containers
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.670214 5003 server.go:460] "Adding debug handlers to kubelet server"
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675182 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675269 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675281 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675296 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675307 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675321 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675332 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675346 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675359 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675369 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675382 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675392 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675403 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675415 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675427 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675437 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675451 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675462 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675473 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675502 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675517 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675528 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675563 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675574 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675587 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675598 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675616 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675628 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675640 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675651 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675720 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675732 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675743 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675754 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675765 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675777 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675788 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675799 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675832 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675843 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675854 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675865 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675875 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675887 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675899 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675910 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675943 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675956 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675967 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675983 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.675995 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676006 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676042 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676055 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676089 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676101 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676112 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676123 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676134 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676145 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676155 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676165 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676229 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676239 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676254 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676266 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676276 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676287 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676297 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676315 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676381 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676393 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676404 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676414 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676424 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676434 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676446 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676457 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676510 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676524 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676535 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676547 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676584 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676596 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676630 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676642 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676675 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676691 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676702 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676713 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676724 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676734 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676743 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676756 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676804 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676814 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676825 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676835 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676845 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676857 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676867 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676900 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676914 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676931 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676948 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676959 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676972 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.676984 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677001 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677015 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677028 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677040 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677052 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677065 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677083 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677097 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677109 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677132 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677151 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677163 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677177 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677190 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677202 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677216 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677227 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677240 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677281 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677294 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677307 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext=""
Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677320 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config"
seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677332 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677344 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677361 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677412 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677426 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677448 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677460 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677476 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677506 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677524 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677537 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 06 
15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677553 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677564 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677609 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677626 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677636 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677646 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677657 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677672 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677684 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677701 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677713 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: 
I1206 15:32:01.677724 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677739 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677751 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677763 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677776 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677788 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677805 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677815 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677826 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677836 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677847 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677857 5003 reconstruct.go:130] 
"Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677868 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677879 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677895 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677910 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677920 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677931 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677940 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677950 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677960 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677971 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677980 5003 reconstruct.go:130] "Volume is marked as uncertain and added into 
the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677990 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.677998 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.678008 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.678017 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.678028 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.678064 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.678076 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.679147 5003 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.679173 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.679189 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: 
I1206 15:32:01.679202 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.679214 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.679225 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.679235 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.679246 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.679257 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.679268 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.679279 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.679291 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.679301 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.679312 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.679323 5003 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.679334 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.679346 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.679359 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.679370 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.679382 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.679397 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.679408 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.679420 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.679432 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.679444 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.679457 5003 reconstruct.go:130] "Volume is marked as uncertain and added 
into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.679469 5003 reconstruct.go:97] "Volume reconstruction finished" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.679478 5003 reconciler.go:26] "Reconciler: start to sync state" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.699059 5003 manager.go:324] Recovery completed Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.708691 5003 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.710648 5003 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.710744 5003 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.710824 5003 kubelet.go:2335] "Starting kubelet main sync loop" Dec 06 15:32:01 crc kubenswrapper[5003]: E1206 15:32:01.711127 5003 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 06 15:32:01 crc kubenswrapper[5003]: W1206 15:32:01.712002 5003 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.73:6443: connect: connection refused Dec 06 15:32:01 crc kubenswrapper[5003]: E1206 15:32:01.712070 5003 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.73:6443: connect: connection refused" logger="UnhandledError" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.714333 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.715578 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.715781 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.715879 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.716528 5003 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.716554 5003 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.716579 5003 state_mem.go:36] "Initialized new in-memory state store" Dec 06 15:32:01 crc kubenswrapper[5003]: E1206 15:32:01.756801 5003 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.771399 5003 policy_none.go:49] "None policy: Start" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.772560 5003 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 
15:32:01.772674 5003 state_mem.go:35] "Initializing new in-memory state store" Dec 06 15:32:01 crc kubenswrapper[5003]: E1206 15:32:01.812231 5003 kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet" Dec 06 15:32:01 crc kubenswrapper[5003]: E1206 15:32:01.857049 5003 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.858663 5003 manager.go:334] "Starting Device Plugin manager" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.858734 5003 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.858747 5003 server.go:79] "Starting device plugin registration server" Dec 06 15:32:01 crc kubenswrapper[5003]: E1206 15:32:01.858875 5003 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.73:6443: connect: connection refused" interval="400ms" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.859224 5003 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.859238 5003 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.859361 5003 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.859465 5003 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.859511 5003 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 06 15:32:01 crc kubenswrapper[5003]: E1206 15:32:01.866036 5003 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.960308 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.961687 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.961716 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.961724 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:01 crc kubenswrapper[5003]: I1206 15:32:01.961746 5003 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 06 15:32:01 crc kubenswrapper[5003]: E1206 15:32:01.962177 5003 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.73:6443: connect: connection refused" node="crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.013437 5003 kubelet.go:2421] "SyncLoop ADD" source="file" 
pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.013597 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.015014 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.015068 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.015091 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.015261 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.015372 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.015399 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.016376 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.016818 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.017057 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.016384 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.017324 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.017335 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.017453 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.017826 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.017902 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.018112 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.018129 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.018136 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.018229 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.018431 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.018547 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.018872 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.018921 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.018937 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.019157 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.019792 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.020023 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.020074 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.020277 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.020294 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.020222 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.020433 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.020467 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.020797 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.020848 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.021553 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.021582 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.021598 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.021900 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.021951 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.021975 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.022952 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.022985 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.023001 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.086043 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.086072 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.086089 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.086110 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.086123 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.086156 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.086189 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.086220 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.086255 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.086270 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.086283 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.086333 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.086356 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.086400 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.086413 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.162566 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.163835 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.163875 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.163883 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.165613 5003 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 06 15:32:02 crc kubenswrapper[5003]: E1206 15:32:02.167276 5003 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.73:6443: connect: connection refused" node="crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.187060 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.187093 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.187110 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.187127 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.187140 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.187156 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.187176 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.187196 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.187216 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.187236 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.187254 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.187272 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.187267 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.187320 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.187322 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " 
pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.187289 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.187364 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.187400 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.187409 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.187424 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.187384 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.187439 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.187462 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.187463 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.187478 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod 
\"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.187527 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.187528 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.187551 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.187638 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.187704 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: E1206 15:32:02.260147 5003 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.73:6443: connect: connection refused" interval="800ms" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.346090 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.356730 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.371305 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.382802 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: W1206 15:32:02.384580 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-6f595b06add2b14169a06c44e06a2c05f47e1c580abc2e79b4dd1c03aa073a2d WatchSource:0}: Error finding container 6f595b06add2b14169a06c44e06a2c05f47e1c580abc2e79b4dd1c03aa073a2d: Status 404 returned error can't find the container with id 6f595b06add2b14169a06c44e06a2c05f47e1c580abc2e79b4dd1c03aa073a2d Dec 06 15:32:02 crc kubenswrapper[5003]: W1206 15:32:02.384921 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-223d94df99b16c4e0d5d9656ac356f6f49f5f76a7bc242413deaf4054b0d2cf1 WatchSource:0}: Error finding container 223d94df99b16c4e0d5d9656ac356f6f49f5f76a7bc242413deaf4054b0d2cf1: Status 404 returned error can't find the container with id 223d94df99b16c4e0d5d9656ac356f6f49f5f76a7bc242413deaf4054b0d2cf1 Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.388837 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 06 15:32:02 crc kubenswrapper[5003]: W1206 15:32:02.398235 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-73d20c082a8910387d16967e2e564f5af0f043e94fc1ef35ab8aefe5a72e0b7b WatchSource:0}: Error finding container 73d20c082a8910387d16967e2e564f5af0f043e94fc1ef35ab8aefe5a72e0b7b: Status 404 returned error can't find the container with id 73d20c082a8910387d16967e2e564f5af0f043e94fc1ef35ab8aefe5a72e0b7b Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.567895 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.570204 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.570653 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.570670 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.570702 5003 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 06 15:32:02 crc kubenswrapper[5003]: E1206 15:32:02.571422 5003 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.73:6443: connect: connection refused" node="crc" Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.655308 5003 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.73:6443: connect: connection refused Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.714972 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"b4ff94d858a6f98016db3f09d7c1f8cfd95a9537b2fc58118d2c8c35bf287e5c"} Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.716132 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"73d20c082a8910387d16967e2e564f5af0f043e94fc1ef35ab8aefe5a72e0b7b"} Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.717102 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"8f4529972ac44db2c048583da1f83b18493b2fb883189a3cbf4b10a303233b73"} Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.717998 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"223d94df99b16c4e0d5d9656ac356f6f49f5f76a7bc242413deaf4054b0d2cf1"} Dec 06 15:32:02 crc kubenswrapper[5003]: I1206 15:32:02.718800 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"6f595b06add2b14169a06c44e06a2c05f47e1c580abc2e79b4dd1c03aa073a2d"} Dec 06 15:32:02 crc kubenswrapper[5003]: W1206 15:32:02.730236 5003 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.73:6443: connect: connection refused Dec 06 15:32:02 crc kubenswrapper[5003]: E1206 15:32:02.730311 5003 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.73:6443: connect: connection refused" logger="UnhandledError" Dec 06 15:32:03 crc kubenswrapper[5003]: W1206 15:32:03.003130 5003 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.73:6443: connect: connection refused Dec 06 15:32:03 crc kubenswrapper[5003]: E1206 15:32:03.003264 5003 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.73:6443: connect: connection refused" logger="UnhandledError" Dec 06 15:32:03 crc kubenswrapper[5003]: W1206 15:32:03.057801 5003 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.73:6443: connect: connection refused Dec 06 15:32:03 crc kubenswrapper[5003]: E1206 15:32:03.057891 5003 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 
38.102.83.73:6443: connect: connection refused" logger="UnhandledError" Dec 06 15:32:03 crc kubenswrapper[5003]: E1206 15:32:03.061542 5003 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.73:6443: connect: connection refused" interval="1.6s" Dec 06 15:32:03 crc kubenswrapper[5003]: W1206 15:32:03.067905 5003 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.73:6443: connect: connection refused Dec 06 15:32:03 crc kubenswrapper[5003]: E1206 15:32:03.067968 5003 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.73:6443: connect: connection refused" logger="UnhandledError" Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.372419 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.373863 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.373889 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.373898 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.373917 5003 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 06 15:32:03 crc kubenswrapper[5003]: E1206 15:32:03.374258 5003 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.73:6443: connect: connection refused" node="crc" Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.654664 5003 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.73:6443: connect: connection refused Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.677300 5003 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 06 15:32:03 crc kubenswrapper[5003]: E1206 15:32:03.678546 5003 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.73:6443: connect: connection refused" logger="UnhandledError" Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.724106 5003 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e" exitCode=0 Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.724200 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e"} Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.724263 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.725405 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.725453 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.725466 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.725587 5003 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="f0bc21cca67ba755fbda0c75b39c6240f21f3ee0e682ed599b10145dc8eb1c65" exitCode=0 Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.725643 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"f0bc21cca67ba755fbda0c75b39c6240f21f3ee0e682ed599b10145dc8eb1c65"} Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.725758 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.726714 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.726756 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.726773 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.727676 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.729028 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.729069 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.729078 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.731385 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659"} Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.731424 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3"} Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.731434 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928"} Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.731443 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1"} Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.731552 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.732367 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.732388 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.732396 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.733566 5003 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="3bd16db35730810799b00301bb8a68f91d69dc93dac04638d5c187bdd374393d" exitCode=0 Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.733631 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"3bd16db35730810799b00301bb8a68f91d69dc93dac04638d5c187bdd374393d"} Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.733639 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.734362 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.734412 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.734421 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.736936 5003 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="bd8ae927f5dc1164e0be3c62baac330d746d3dae3167cb971054c6a3f50ce345" exitCode=0 Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.736963 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"bd8ae927f5dc1164e0be3c62baac330d746d3dae3167cb971054c6a3f50ce345"} Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.737109 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.738381 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.738411 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 
15:32:03 crc kubenswrapper[5003]: I1206 15:32:03.738420 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.655285 5003 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.73:6443: connect: connection refused Dec 06 15:32:04 crc kubenswrapper[5003]: E1206 15:32:04.662711 5003 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.73:6443: connect: connection refused" interval="3.2s" Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.741873 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c"} Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.741925 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76"} Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.741941 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0"} Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.741952 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5"} Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.741962 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c"} Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.742078 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.742878 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.742895 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.742903 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.744627 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"78c40ea8b57b79ee32adfe2fcafc8c6e887aa063a4d6b3466818fdd0d204561a"} Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.744694 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" 
Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.745294 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.745315 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.745322 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.747153 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"94df3e8c0295aedc3bf7b97296d443b5240d17fcd83f8e8cf1bc9730740d7f29"}
Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.747178 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"8c1591171f15bdf52339cb914e52de4dad9c34f1a6b6bb882f15bb41308a5b7f"}
Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.747188 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"15818f84cfa472a42a18bafe5ff4a71da326b2f5871f47e693d5e1a1b3c8b986"}
Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.747244 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.747852 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.747880 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.747891 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.749192 5003 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="fdc40421549e4a1452b2c3d3ac2cdf29143dae118af75b8049d0e455c1d5d256" exitCode=0
Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.749277 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.749638 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.749912 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"fdc40421549e4a1452b2c3d3ac2cdf29143dae118af75b8049d0e455c1d5d256"}
Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.750229 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.750248 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.750256 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.750785 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.750808 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.750816 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.975036 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.976072 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.976102 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.976116 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:04 crc kubenswrapper[5003]: I1206 15:32:04.976141 5003 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 06 15:32:05 crc kubenswrapper[5003]: I1206 15:32:05.363629 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 06 15:32:05 crc kubenswrapper[5003]: I1206 15:32:05.515623 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 06 15:32:05 crc kubenswrapper[5003]: I1206 15:32:05.752694 5003 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="72ad699d40d6bbae37ece0be57e00cf8198c543af71475186ba6bca233e19c59" exitCode=0
Dec 06 15:32:05 crc kubenswrapper[5003]: I1206 15:32:05.752812 5003 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 06 15:32:05 crc kubenswrapper[5003]: I1206 15:32:05.752824 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 06 15:32:05 crc kubenswrapper[5003]: I1206 15:32:05.752845 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 06 15:32:05 crc kubenswrapper[5003]: I1206 15:32:05.752875 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 06 15:32:05 crc kubenswrapper[5003]: I1206 15:32:05.752938 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 06 15:32:05 crc kubenswrapper[5003]: I1206 15:32:05.752820 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"72ad699d40d6bbae37ece0be57e00cf8198c543af71475186ba6bca233e19c59"}
Dec 06 15:32:05 crc kubenswrapper[5003]: I1206 15:32:05.752824 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 06 15:32:05 crc kubenswrapper[5003]: I1206 15:32:05.754022 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:05 crc kubenswrapper[5003]: I1206 15:32:05.754032 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:05 crc kubenswrapper[5003]: I1206 15:32:05.754046 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:05 crc kubenswrapper[5003]: I1206 15:32:05.754057 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:05 crc kubenswrapper[5003]: I1206 15:32:05.754046 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:05 crc kubenswrapper[5003]: I1206 15:32:05.754083 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:05 crc kubenswrapper[5003]: I1206 15:32:05.754048 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:05 crc kubenswrapper[5003]: I1206 15:32:05.754118 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:05 crc kubenswrapper[5003]: I1206 15:32:05.754095 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:05 crc kubenswrapper[5003]: I1206 15:32:05.754593 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:05 crc kubenswrapper[5003]: I1206 15:32:05.754616 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:05 crc kubenswrapper[5003]: I1206 15:32:05.754623 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:05 crc kubenswrapper[5003]: I1206 15:32:05.754596 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:05 crc kubenswrapper[5003]: I1206 15:32:05.754685 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:05 crc kubenswrapper[5003]: I1206 15:32:05.754699 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:05 crc kubenswrapper[5003]: I1206 15:32:05.934322 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 06 15:32:05 crc kubenswrapper[5003]: I1206 15:32:05.997726 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 06 15:32:06 crc kubenswrapper[5003]: I1206 15:32:06.537529 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 06 15:32:06 crc kubenswrapper[5003]: I1206 15:32:06.758572 5003 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 06 15:32:06 crc kubenswrapper[5003]: I1206 15:32:06.758656 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 06 15:32:06 crc kubenswrapper[5003]: I1206 15:32:06.758671 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 06 15:32:06 crc kubenswrapper[5003]: I1206 15:32:06.758568 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"269db465fc9bbf92ebd14dfe26ad8e6c24df17e114552c436de2176f1a58d326"}
Dec 06 15:32:06 crc kubenswrapper[5003]: I1206 15:32:06.758673 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 06 15:32:06 crc kubenswrapper[5003]: I1206 15:32:06.758812 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"7b526ac47de443d35197518b6b4636600747093ce094cd30138d6b086b0f7da5"}
Dec 06 15:32:06 crc kubenswrapper[5003]: I1206 15:32:06.759017 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"c340e7bd862f75d1d2d720236b9938e0749dc5089a0edf227045408ea8aa34a0"}
Dec 06 15:32:06 crc kubenswrapper[5003]: I1206 15:32:06.759043 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"34f40b931fd028c9fd16760831dc5bcf1043536e93452caacf8ac5d1bf59d1de"}
Dec 06 15:32:06 crc kubenswrapper[5003]: I1206 15:32:06.759694 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:06 crc kubenswrapper[5003]: I1206 15:32:06.759715 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:06 crc kubenswrapper[5003]: I1206 15:32:06.759723 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:06 crc kubenswrapper[5003]: I1206 15:32:06.760098 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:06 crc kubenswrapper[5003]: I1206 15:32:06.760121 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:06 crc kubenswrapper[5003]: I1206 15:32:06.760129 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:06 crc kubenswrapper[5003]: I1206 15:32:06.760346 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:06 crc kubenswrapper[5003]: I1206 15:32:06.760369 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:06 crc kubenswrapper[5003]: I1206 15:32:06.760378 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:07 crc kubenswrapper[5003]: I1206 15:32:07.764805 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"52de3cb379ae4baa685f8adb4b0cbc848ac1823dd1c3e3e84df47c83bf5ef898"}
Dec 06 15:32:07 crc kubenswrapper[5003]: I1206 15:32:07.764845 5003 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 06 15:32:07 crc kubenswrapper[5003]: I1206 15:32:07.765305 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 06 15:32:07 crc kubenswrapper[5003]: I1206 15:32:07.764901 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 06 15:32:07 crc kubenswrapper[5003]: I1206 15:32:07.766626 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:07 crc kubenswrapper[5003]: I1206 15:32:07.766664 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:07 crc kubenswrapper[5003]: I1206 15:32:07.766675 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:07 crc kubenswrapper[5003]: I1206 15:32:07.767432 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:07 crc kubenswrapper[5003]: I1206 15:32:07.767472 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:07 crc kubenswrapper[5003]: I1206 15:32:07.767503 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:07 crc kubenswrapper[5003]: I1206 15:32:07.840326 5003 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Dec 06 15:32:08 crc kubenswrapper[5003]: I1206 15:32:08.767008 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 06 15:32:08 crc kubenswrapper[5003]: I1206 15:32:08.767826 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:08 crc kubenswrapper[5003]: I1206 15:32:08.767860 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:08 crc kubenswrapper[5003]: I1206 15:32:08.767870 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:10 crc kubenswrapper[5003]: I1206 15:32:10.280188 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc"
Dec 06 15:32:10 crc kubenswrapper[5003]: I1206 15:32:10.280612 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 06 15:32:10 crc kubenswrapper[5003]: I1206 15:32:10.282564 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:10 crc kubenswrapper[5003]: I1206 15:32:10.282618 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:10 crc kubenswrapper[5003]: I1206 15:32:10.282635 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:10 crc kubenswrapper[5003]: I1206 15:32:10.493787 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 06 15:32:10 crc kubenswrapper[5003]: I1206 15:32:10.494078 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 06 15:32:10 crc kubenswrapper[5003]: I1206 15:32:10.496186 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:10 crc kubenswrapper[5003]: I1206 15:32:10.496245 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:10 crc kubenswrapper[5003]: I1206 15:32:10.496270 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:10 crc kubenswrapper[5003]: I1206 15:32:10.703441 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 06 15:32:10 crc kubenswrapper[5003]: I1206 15:32:10.704031 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 06 15:32:10 crc kubenswrapper[5003]: I1206 15:32:10.705810 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:10 crc kubenswrapper[5003]: I1206 15:32:10.705864 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:10 crc kubenswrapper[5003]: I1206 15:32:10.705886 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:11 crc kubenswrapper[5003]: E1206 15:32:11.866189 5003 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Dec 06 15:32:12 crc kubenswrapper[5003]: I1206 15:32:12.523293 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 06 15:32:12 crc kubenswrapper[5003]: I1206 15:32:12.523648 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 06 15:32:12 crc kubenswrapper[5003]: I1206 15:32:12.525100 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:12 crc kubenswrapper[5003]: I1206 15:32:12.525135 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:12 crc kubenswrapper[5003]: I1206 15:32:12.525146 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:12 crc kubenswrapper[5003]: I1206 15:32:12.529261 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 06 15:32:12 crc kubenswrapper[5003]: I1206 15:32:12.779114 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 06 15:32:12 crc kubenswrapper[5003]: I1206 15:32:12.781048 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:12 crc kubenswrapper[5003]: I1206 15:32:12.781305 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:12 crc kubenswrapper[5003]: I1206 15:32:12.781522 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:12 crc kubenswrapper[5003]: I1206 15:32:12.787619 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 06 15:32:13 crc kubenswrapper[5003]: I1206 15:32:13.494857 5003 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body=
Dec 06 15:32:13 crc kubenswrapper[5003]: I1206 15:32:13.494988 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 06 15:32:13 crc kubenswrapper[5003]: I1206 15:32:13.781829 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 06 15:32:13 crc kubenswrapper[5003]: I1206 15:32:13.783022 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:13 crc kubenswrapper[5003]: I1206 15:32:13.783053 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:13 crc kubenswrapper[5003]: I1206 15:32:13.783063 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:14 crc kubenswrapper[5003]: E1206 15:32:14.977367 5003 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": net/http: TLS handshake timeout" node="crc"
Dec 06 15:32:15 crc kubenswrapper[5003]: W1206 15:32:15.063421 5003 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout
Dec 06 15:32:15 crc kubenswrapper[5003]: I1206 15:32:15.063539 5003 trace.go:236] Trace[142859967]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (06-Dec-2025 15:32:05.061) (total time: 10001ms):
Dec 06 15:32:15 crc kubenswrapper[5003]: Trace[142859967]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (15:32:15.063)
Dec 06 15:32:15 crc kubenswrapper[5003]: Trace[142859967]: [10.001561191s] [10.001561191s] END
Dec 06 15:32:15 crc kubenswrapper[5003]: E1206 15:32:15.063563 5003 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Dec 06 15:32:15 crc kubenswrapper[5003]: W1206 15:32:15.127462 5003 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout
Dec 06 15:32:15 crc kubenswrapper[5003]: I1206 15:32:15.127592 5003 trace.go:236] Trace[1450211597]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (06-Dec-2025 15:32:05.126) (total time: 10001ms):
Dec 06 15:32:15 crc kubenswrapper[5003]: Trace[1450211597]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (15:32:15.127)
Dec 06 15:32:15 crc kubenswrapper[5003]: Trace[1450211597]: [10.001236216s] [10.001236216s] END
Dec 06 15:32:15 crc kubenswrapper[5003]: E1206 15:32:15.127615 5003 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Dec 06 15:32:15 crc kubenswrapper[5003]: I1206 15:32:15.657352 5003 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Dec 06 15:32:15 crc kubenswrapper[5003]: I1206 15:32:15.657453 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Dec 06 15:32:15 crc kubenswrapper[5003]: I1206 15:32:15.664219 5003 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Dec 06 15:32:15 crc kubenswrapper[5003]: I1206 15:32:15.664319 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Dec 06 15:32:16 crc kubenswrapper[5003]: I1206 15:32:16.212798 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc"
Dec 06 15:32:16 crc kubenswrapper[5003]: I1206 15:32:16.213009 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 06 15:32:16 crc kubenswrapper[5003]: I1206 15:32:16.214523 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:16 crc kubenswrapper[5003]: I1206 15:32:16.214585 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:16 crc kubenswrapper[5003]: I1206 15:32:16.214606 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:16 crc kubenswrapper[5003]: I1206 15:32:16.251853 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc"
Dec 06 15:32:16 crc kubenswrapper[5003]: I1206 15:32:16.544010 5003 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok
Dec 06 15:32:16 crc kubenswrapper[5003]: [+]log ok
Dec 06 15:32:16 crc kubenswrapper[5003]: [+]etcd ok
Dec 06 15:32:16 crc kubenswrapper[5003]: [+]poststarthook/openshift.io-oauth-apiserver-reachable ok
Dec 06 15:32:16 crc kubenswrapper[5003]: [+]poststarthook/start-apiserver-admission-initializer ok
Dec 06 15:32:16 crc kubenswrapper[5003]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok
Dec 06 15:32:16 crc kubenswrapper[5003]: [+]poststarthook/openshift.io-api-request-count-filter ok
Dec 06 15:32:16 crc kubenswrapper[5003]: [+]poststarthook/openshift.io-startkubeinformers ok
Dec 06 15:32:16 crc kubenswrapper[5003]: [+]poststarthook/openshift.io-openshift-apiserver-reachable ok
Dec 06 15:32:16 crc kubenswrapper[5003]: [+]poststarthook/generic-apiserver-start-informers ok
Dec 06 15:32:16 crc kubenswrapper[5003]: [+]poststarthook/priority-and-fairness-config-consumer ok
Dec 06 15:32:16 crc kubenswrapper[5003]: [+]poststarthook/priority-and-fairness-filter ok
Dec 06 15:32:16 crc kubenswrapper[5003]: [+]poststarthook/storage-object-count-tracker-hook ok
Dec 06 15:32:16 crc kubenswrapper[5003]: [+]poststarthook/start-apiextensions-informers ok
Dec 06 15:32:16 crc kubenswrapper[5003]: [+]poststarthook/start-apiextensions-controllers ok
Dec 06 15:32:16 crc kubenswrapper[5003]: [+]poststarthook/crd-informer-synced ok
Dec 06 15:32:16 crc kubenswrapper[5003]: [+]poststarthook/start-system-namespaces-controller ok
Dec 06 15:32:16 crc kubenswrapper[5003]: [+]poststarthook/start-cluster-authentication-info-controller ok
Dec 06 15:32:16 crc kubenswrapper[5003]: [+]poststarthook/start-kube-apiserver-identity-lease-controller ok
Dec 06 15:32:16 crc kubenswrapper[5003]: [+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
Dec 06 15:32:16 crc kubenswrapper[5003]: [+]poststarthook/start-legacy-token-tracking-controller ok
Dec 06 15:32:16 crc kubenswrapper[5003]: [+]poststarthook/start-service-ip-repair-controllers ok
Dec 06 15:32:16 crc kubenswrapper[5003]: [-]poststarthook/rbac/bootstrap-roles failed: reason withheld
Dec 06 15:32:16 crc kubenswrapper[5003]: [-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
Dec 06 15:32:16 crc kubenswrapper[5003]: [+]poststarthook/priority-and-fairness-config-producer ok
Dec 06 15:32:16 crc kubenswrapper[5003]: [+]poststarthook/bootstrap-controller ok
Dec 06 15:32:16 crc kubenswrapper[5003]: [+]poststarthook/aggregator-reload-proxy-client-cert ok
Dec 06 15:32:16 crc kubenswrapper[5003]: [+]poststarthook/start-kube-aggregator-informers ok
Dec 06 15:32:16 crc kubenswrapper[5003]: [+]poststarthook/apiservice-status-local-available-controller ok
Dec 06 15:32:16 crc kubenswrapper[5003]: [+]poststarthook/apiservice-status-remote-available-controller ok
Dec 06 15:32:16 crc kubenswrapper[5003]: [+]poststarthook/apiservice-registration-controller ok
Dec 06 15:32:16 crc kubenswrapper[5003]: [+]poststarthook/apiservice-wait-for-first-sync ok
Dec 06 15:32:16 crc kubenswrapper[5003]: [+]poststarthook/apiservice-discovery-controller ok
Dec 06 15:32:16 crc kubenswrapper[5003]: [+]poststarthook/kube-apiserver-autoregistration ok
Dec 06 15:32:16 crc kubenswrapper[5003]: [+]autoregister-completion ok
Dec 06 15:32:16 crc kubenswrapper[5003]: [+]poststarthook/apiservice-openapi-controller ok
Dec 06 15:32:16 crc kubenswrapper[5003]: [+]poststarthook/apiservice-openapiv3-controller ok
Dec 06 15:32:16 crc kubenswrapper[5003]: livez check failed
Dec 06 15:32:16 crc kubenswrapper[5003]: I1206 15:32:16.544108 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 06 15:32:16 crc kubenswrapper[5003]: I1206 15:32:16.787857 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 06 15:32:16 crc kubenswrapper[5003]: I1206 15:32:16.788654 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:16 crc kubenswrapper[5003]: I1206 15:32:16.788692 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:16 crc kubenswrapper[5003]: I1206 15:32:16.788704 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:16 crc kubenswrapper[5003]: I1206 15:32:16.798900 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc"
Dec 06 15:32:17 crc kubenswrapper[5003]: I1206 15:32:17.790241 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 06 15:32:17 crc kubenswrapper[5003]: I1206 15:32:17.791297 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:17 crc kubenswrapper[5003]: I1206 15:32:17.791339 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:17 crc kubenswrapper[5003]: I1206 15:32:17.791350 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:18 crc kubenswrapper[5003]: I1206 15:32:18.177773 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 06 15:32:18 crc kubenswrapper[5003]: I1206 15:32:18.179742 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:18 crc kubenswrapper[5003]: I1206 15:32:18.179793 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:18 crc kubenswrapper[5003]: I1206 15:32:18.179811 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:18 crc kubenswrapper[5003]: I1206 15:32:18.179844 5003 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 06 15:32:18 crc kubenswrapper[5003]: E1206 15:32:18.185006 5003 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Dec 06 15:32:20 crc kubenswrapper[5003]: I1206 15:32:20.281124 5003 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Dec 06 15:32:20 crc kubenswrapper[5003]: E1206 15:32:20.659911 5003 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s"
Dec 06 15:32:20 crc kubenswrapper[5003]: I1206 15:32:20.661611 5003 trace.go:236] Trace[81915392]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (06-Dec-2025 15:32:05.753) (total time: 14907ms):
Dec 06 15:32:20 crc kubenswrapper[5003]: Trace[81915392]: ---"Objects listed" error: 14907ms (15:32:20.661)
Dec 06 15:32:20 crc kubenswrapper[5003]: Trace[81915392]: [14.907793672s] [14.907793672s] END
Dec 06 15:32:20 crc kubenswrapper[5003]: I1206 15:32:20.661659 5003 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Dec 06 15:32:20 crc kubenswrapper[5003]: I1206 15:32:20.662720 5003 trace.go:236] Trace[247632258]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (06-Dec-2025 15:32:05.907) (total time: 14754ms):
Dec 06 15:32:20 crc kubenswrapper[5003]: Trace[247632258]: ---"Objects listed" error: 14754ms (15:32:20.662)
Dec 06 15:32:20 crc kubenswrapper[5003]: Trace[247632258]: [14.754729878s] [14.754729878s] END
Dec 06 15:32:20 crc kubenswrapper[5003]: I1206 15:32:20.662767 5003 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Dec 06 15:32:20 crc kubenswrapper[5003]: I1206 15:32:20.663815 5003 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Dec 06 15:32:20 crc kubenswrapper[5003]: I1206 15:32:20.665762 5003 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146
Dec 06 15:32:20 crc kubenswrapper[5003]: I1206 15:32:20.710685 5003 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:48428->192.168.126.11:17697: read: connection reset by peer" start-of-body=
Dec 06 15:32:20 crc kubenswrapper[5003]: I1206 15:32:20.710720 5003 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:48438->192.168.126.11:17697: read: connection reset by peer" start-of-body=
Dec 06 15:32:20 crc kubenswrapper[5003]: I1206 15:32:20.710749 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:48428->192.168.126.11:17697: read: connection reset by peer"
Dec 06 15:32:20 crc kubenswrapper[5003]: I1206 15:32:20.710790 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:48438->192.168.126.11:17697: read: connection reset by peer"
Dec 06 15:32:20 crc kubenswrapper[5003]: I1206 15:32:20.799708 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Dec 06 15:32:20 crc kubenswrapper[5003]: I1206 15:32:20.801814 5003 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c" exitCode=255
Dec 06 15:32:20 crc kubenswrapper[5003]: I1206 15:32:20.801867 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c"}
Dec 06 15:32:20 crc kubenswrapper[5003]: I1206 15:32:20.802063 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 06 15:32:20 crc kubenswrapper[5003]: I1206 15:32:20.803027 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:20 crc kubenswrapper[5003]: I1206 15:32:20.803081 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:20 crc kubenswrapper[5003]: I1206 15:32:20.803120 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:20 crc kubenswrapper[5003]: I1206 15:32:20.803733 5003 scope.go:117] "RemoveContainer" containerID="efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c"
Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.041066 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.041230 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.042377 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.042419 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.042428 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.044914 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.256130 5003 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.547238 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.659655 5003 apiserver.go:52] "Watching apiserver"
Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.664435 5003 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.664684 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c"]
Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.665009 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.665089 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.665166 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.665132 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:32:21 crc kubenswrapper[5003]: E1206 15:32:21.665379 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:32:21 crc kubenswrapper[5003]: E1206 15:32:21.665177 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.665259 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.665198 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:32:21 crc kubenswrapper[5003]: E1206 15:32:21.665549 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.668049 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.668123 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.668056 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.668450 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.668910 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.669106 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.669257 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.669398 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.669689 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.691854 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.706853 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.723627 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.733987 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.748731 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.759513 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.759819 5003 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.769087 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.769168 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.769203 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.769224 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.769247 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.769274 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.769295 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.769541 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.769620 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.769646 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.769706 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.769318 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.770095 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.770225 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.770374 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.770525 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.770222 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: 
"49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.770673 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.770665 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.770761 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.770818 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.770847 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.770871 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.770888 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.770904 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.770908 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.770921 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.770964 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.770996 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771022 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771045 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771071 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771093 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771116 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771140 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771139 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod 
"a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771163 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771187 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771212 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771232 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771250 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771265 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771281 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771297 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771317 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771336 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: 
\"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771343 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771354 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771407 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771438 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771446 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771462 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771511 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771539 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771562 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771619 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771644 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771673 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771697 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771719 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771744 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod 
\"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771769 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771793 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771818 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771845 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771866 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771887 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771910 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771933 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771955 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771978 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772001 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772022 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772042 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772068 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772092 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772115 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771768 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772136 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772160 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772184 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772205 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772227 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772250 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772272 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772302 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772325 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772348 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772373 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772394 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772417 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772440 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772462 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772514 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772537 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772560 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772583 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772603 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772624 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772644 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772664 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772686 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772706 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772729 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772751 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772772 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772805 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772825 5003 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772849 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772874 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772897 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772919 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772940 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772956 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772971 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772987 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773002 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773018 
5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773036 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773052 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773068 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773084 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773105 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773127 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773144 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773161 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773177 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 06 15:32:21 crc 
kubenswrapper[5003]: I1206 15:32:21.773201 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773226 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773248 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773267 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773287 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773309 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773331 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773352 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773375 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773396 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: 
\"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773416 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773440 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773462 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773507 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773537 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773561 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773584 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773606 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773630 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773652 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 06 15:32:21 
crc kubenswrapper[5003]: I1206 15:32:21.773746 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773774 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773797 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773831 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773856 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773878 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773899 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773920 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773943 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773964 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: 
\"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773989 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.774013 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.774036 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.774060 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.776205 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.776255 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.776293 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.776332 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.776371 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.776408 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: 
\"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.776446 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.776482 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.776543 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.776582 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.776618 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.777123 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.777169 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.777207 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.777243 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.777282 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: 
\"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.777322 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.777360 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.777397 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.777437 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.777474 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.777534 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.777574 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.777615 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.777652 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.777689 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.777724 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.777759 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.777796 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.777835 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.777876 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.777916 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.777953 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.777990 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.778023 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 
15:32:21.778057 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.778090 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.778132 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.778169 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.778206 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.778242 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.778278 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.778313 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.778350 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.778385 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: 
\"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.778419 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.778455 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.778552 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.778597 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.778637 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.778681 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.778719 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.778761 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.778797 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod 
\"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.778833 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.778872 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.778911 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.778953 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.778989 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.779024 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.779065 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.779836 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.779865 5003 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.780473 5003 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.780536 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.780719 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.780746 5003 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.780768 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.780788 5003 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.780808 5003 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.780831 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.780853 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.780875 5003 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771791 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771912 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.771960 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772066 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772116 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.781326 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772064 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772205 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772279 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772290 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772320 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772411 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772437 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772463 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772553 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772693 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). 
InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772713 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772750 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772833 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772839 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772857 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772980 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772986 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773012 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). 
InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.772998 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773098 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773143 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773184 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773186 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773310 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773357 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773619 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773715 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773806 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773894 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.773918 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.774143 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.774711 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.774709 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.774827 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.774924 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.775038 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.775458 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.775484 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.775680 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.775717 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.776032 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.776627 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.776706 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.776736 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.776895 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.782185 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.777220 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.777350 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.777389 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.777781 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.778136 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.778144 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.778344 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.777462 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.778848 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.778477 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.779250 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.779268 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.779656 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.779937 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.780421 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.780578 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.780772 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.780986 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.781500 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.781600 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.781712 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.781768 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.781866 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.782061 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.782423 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.782448 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.782764 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.783212 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.783457 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.783513 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.783727 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.783645 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.783849 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). 
InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.783863 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.783902 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.784092 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.784109 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.784097 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.784131 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.784336 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.784292 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.784390 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.784799 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.784876 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.784906 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.784974 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.785131 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.785408 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). 
InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.786273 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.786331 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.786377 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.786421 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.786439 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.786840 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.786907 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.787108 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.787250 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.787394 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.787301 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.787410 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.787692 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.787697 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.787746 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.787879 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.788146 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.788281 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.788439 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.788450 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.787849 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.788903 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.789250 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.789318 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.789362 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.789535 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.790081 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.790667 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.790922 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.790949 5003 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.791263 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.791775 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.791845 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.791943 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.792083 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.792258 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.792316 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.792370 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.792407 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.792528 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.792663 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.792776 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.792806 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.792943 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.792961 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.793163 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.793394 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: E1206 15:32:21.793631 5003 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.793650 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: E1206 15:32:21.793797 5003 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.793992 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: E1206 15:32:21.794001 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-06 15:32:22.293942724 +0000 UTC m=+20.827297215 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.794071 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 06 15:32:21 crc kubenswrapper[5003]: E1206 15:32:21.794082 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:32:22.294045277 +0000 UTC m=+20.827399708 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.794410 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.794467 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: E1206 15:32:21.794563 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-06 15:32:22.294546481 +0000 UTC m=+20.827900862 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.794668 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.794883 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.796121 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.796324 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.798614 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.799907 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.799925 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.799914 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: E1206 15:32:21.800020 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 06 15:32:21 crc kubenswrapper[5003]: E1206 15:32:21.800039 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 06 15:32:21 crc kubenswrapper[5003]: E1206 15:32:21.800053 5003 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 15:32:21 crc kubenswrapper[5003]: E1206 15:32:21.800260 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-06 15:32:22.300242497 +0000 UTC m=+20.833596978 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.800812 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.801110 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.801246 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.801274 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.801473 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.801658 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.802592 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.803208 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.803333 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.803365 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.803983 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.804654 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.805919 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.808895 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.809074 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.810254 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.810801 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.811827 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5"} Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.812051 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.812137 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.812262 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.812330 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.811988 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.812528 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.812901 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.812960 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.813035 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.813150 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.814222 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 06 15:32:21 crc kubenswrapper[5003]: E1206 15:32:21.814264 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 06 15:32:21 crc kubenswrapper[5003]: E1206 15:32:21.814282 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 06 15:32:21 crc kubenswrapper[5003]: E1206 15:32:21.814297 5003 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 15:32:21 crc kubenswrapper[5003]: E1206 15:32:21.814529 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-06 15:32:22.314402193 +0000 UTC m=+20.847756574 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.818806 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.820737 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.823085 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.823313 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.824992 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.825119 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.827002 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.833530 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.840183 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.841246 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.842780 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.843211 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.845113 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.853804 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.869107 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.880494 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881201 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881288 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881299 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881407 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881417 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881460 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881473 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881516 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881531 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881543 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881555 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881568 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881608 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881621 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881632 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881643 5003 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881680 5003 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881694 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881707 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881717 5003 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881728 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881765 5003 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881777 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881788 5003 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881799 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881846 5003 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881863 5003 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881874 5003 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881885 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881896 5003 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881909 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881919 5003 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881930 5003 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881941 5003 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881951 5003 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881962 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881973 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881984 5003 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.881995 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882005 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882016 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882027 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882038 5003 reconciler_common.go:293] 
"Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882048 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882058 5003 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882069 5003 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882080 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882091 5003 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882101 5003 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882111 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882123 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882134 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882145 5003 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882156 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882168 5003 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882180 5003 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" 
(UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882216 5003 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882229 5003 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882241 5003 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882255 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882267 5003 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882279 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882290 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882303 5003 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882314 5003 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882327 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882340 5003 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882352 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882363 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882376 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882388 5003 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882400 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882412 5003 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882423 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882436 5003 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882448 5003 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882458 5003 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882470 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882517 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882532 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882544 5003 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882556 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" 
(UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882568 5003 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882580 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882592 5003 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882603 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882616 5003 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882629 5003 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882641 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882654 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882668 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882679 5003 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882691 5003 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882703 5003 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882714 5003 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882725 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882736 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882748 5003 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882761 5003 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882774 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882792 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882805 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882816 5003 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882828 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882839 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882850 5003 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882862 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882874 5003 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882885 5003 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882897 5003 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882909 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882921 5003 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882933 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882944 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882957 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882968 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882980 5003 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.882991 5003 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883002 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883013 5003 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883025 5003 reconciler_common.go:293] "Volume detached for volume 
\"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883038 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883051 5003 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883063 5003 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883075 5003 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883087 5003 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883099 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883111 5003 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883123 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883135 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883165 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883178 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883191 5003 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883202 5003 
reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883215 5003 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883226 5003 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883238 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883249 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883260 5003 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883272 5003 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883284 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883297 5003 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883310 5003 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883322 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883334 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883345 5003 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883357 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: 
\"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883369 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883381 5003 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883393 5003 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883404 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883415 5003 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883427 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883438 5003 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883448 5003 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883460 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883471 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883504 5003 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883516 5003 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883528 5003 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: 
\"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883539 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883550 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883561 5003 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883573 5003 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883584 5003 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883595 5003 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883606 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883617 5003 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883628 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883639 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883651 5003 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883661 5003 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883672 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: 
\"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883683 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883694 5003 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883728 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883741 5003 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883752 5003 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883763 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883774 5003 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883785 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883796 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883807 5003 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883817 5003 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883827 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.883838 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.889416 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.901393 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.910421 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.918705 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.928580 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.939682 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.983120 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.992964 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 06 15:32:21 crc kubenswrapper[5003]: I1206 15:32:21.999751 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 06 15:32:22 crc kubenswrapper[5003]: W1206 15:32:22.007778 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-bef4e78686a460191a7875c3567da82c98fb29c720bb7fcf473da867eee9ff77 WatchSource:0}: Error finding container bef4e78686a460191a7875c3567da82c98fb29c720bb7fcf473da867eee9ff77: Status 404 returned error can't find the container with id bef4e78686a460191a7875c3567da82c98fb29c720bb7fcf473da867eee9ff77 Dec 06 15:32:22 crc kubenswrapper[5003]: I1206 15:32:22.389413 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:32:22 crc kubenswrapper[5003]: I1206 15:32:22.389512 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:32:22 crc kubenswrapper[5003]: I1206 15:32:22.389555 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:32:22 crc kubenswrapper[5003]: E1206 15:32:22.389604 5003 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:32:23.389556196 +0000 UTC m=+21.922910577 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:32:22 crc kubenswrapper[5003]: I1206 15:32:22.389625 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:32:22 crc kubenswrapper[5003]: I1206 15:32:22.389649 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:32:22 crc kubenswrapper[5003]: E1206 15:32:22.389650 5003 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 06 15:32:22 crc kubenswrapper[5003]: E1206 15:32:22.389689 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-06 15:32:23.38968287 +0000 UTC m=+21.923037251 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 06 15:32:22 crc kubenswrapper[5003]: E1206 15:32:22.389734 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 06 15:32:22 crc kubenswrapper[5003]: E1206 15:32:22.389741 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 06 15:32:22 crc kubenswrapper[5003]: E1206 15:32:22.389749 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 06 15:32:22 crc kubenswrapper[5003]: E1206 15:32:22.389762 5003 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 15:32:22 crc kubenswrapper[5003]: E1206 15:32:22.389794 5003 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 06 15:32:22 crc kubenswrapper[5003]: E1206 15:32:22.389798 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-06 15:32:23.389787283 +0000 UTC m=+21.923141674 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 15:32:22 crc kubenswrapper[5003]: E1206 15:32:22.389815 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-06 15:32:23.389809213 +0000 UTC m=+21.923163594 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 06 15:32:22 crc kubenswrapper[5003]: E1206 15:32:22.389751 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 06 15:32:22 crc kubenswrapper[5003]: E1206 15:32:22.389828 5003 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 15:32:22 crc kubenswrapper[5003]: E1206 15:32:22.389855 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-06 15:32:23.389847164 +0000 UTC m=+21.923201555 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 15:32:22 crc kubenswrapper[5003]: I1206 15:32:22.816055 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f"} Dec 06 15:32:22 crc kubenswrapper[5003]: I1206 15:32:22.816107 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e"} Dec 06 15:32:22 crc kubenswrapper[5003]: I1206 15:32:22.816121 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"191e726eb2bcef3f70dea88563bf50127966c12f30c189f4befc1f1aa79720db"} Dec 06 15:32:22 crc kubenswrapper[5003]: I1206 15:32:22.817808 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"bef4e78686a460191a7875c3567da82c98fb29c720bb7fcf473da867eee9ff77"} Dec 06 15:32:22 crc kubenswrapper[5003]: I1206 15:32:22.819283 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20"} Dec 06 15:32:22 crc kubenswrapper[5003]: I1206 15:32:22.819316 5003 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"39c3d8eb7d67d3094235e75ff62759640a871066008f28bb3d5c3f9345ab561a"} Dec 06 15:32:22 crc kubenswrapper[5003]: I1206 15:32:22.842540 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:22Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:22 crc kubenswrapper[5003]: I1206 15:32:22.863393 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers 
with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:22Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:22 crc kubenswrapper[5003]: I1206 15:32:22.887780 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:22Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:22 crc kubenswrapper[5003]: I1206 15:32:22.913005 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:22Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:22 crc kubenswrapper[5003]: I1206 15:32:22.936979 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:2
2Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:22Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:22 crc kubenswrapper[5003]: I1206 15:32:22.955241 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:22Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:22 crc kubenswrapper[5003]: I1206 15:32:22.970763 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:22Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:22 crc kubenswrapper[5003]: I1206 15:32:22.985067 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:22Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.002224 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:23Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.019561 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:23Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.036075 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:23Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.057285 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"
running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:23Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.074276 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:23Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.089284 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:23Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.104214 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:23Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.118940 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:23Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.399005 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.399090 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:32:23 
crc kubenswrapper[5003]: E1206 15:32:23.399161 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:32:25.399133326 +0000 UTC m=+23.932487707 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:32:23 crc kubenswrapper[5003]: E1206 15:32:23.399205 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 06 15:32:23 crc kubenswrapper[5003]: E1206 15:32:23.399223 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.399231 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:32:23 crc kubenswrapper[5003]: E1206 15:32:23.399239 5003 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.399257 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:32:23 crc kubenswrapper[5003]: E1206 15:32:23.399292 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-06 15:32:25.39927667 +0000 UTC m=+23.932631061 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.399316 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:32:23 crc kubenswrapper[5003]: E1206 15:32:23.399357 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 06 15:32:23 crc kubenswrapper[5003]: E1206 15:32:23.399372 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 06 15:32:23 crc kubenswrapper[5003]: E1206 15:32:23.399382 5003 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 15:32:23 crc kubenswrapper[5003]: E1206 15:32:23.399386 5003 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 06 15:32:23 crc kubenswrapper[5003]: E1206 15:32:23.399401 5003 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 06 15:32:23 crc kubenswrapper[5003]: E1206 15:32:23.399412 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-06 15:32:25.399405874 +0000 UTC m=+23.932760255 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 15:32:23 crc kubenswrapper[5003]: E1206 15:32:23.399464 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-06 15:32:25.399451715 +0000 UTC m=+23.932806116 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 06 15:32:23 crc kubenswrapper[5003]: E1206 15:32:23.399480 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-06 15:32:25.399471346 +0000 UTC m=+23.932825737 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.712040 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.712113 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.712166 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:32:23 crc kubenswrapper[5003]: E1206 15:32:23.712314 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:32:23 crc kubenswrapper[5003]: E1206 15:32:23.712401 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:32:23 crc kubenswrapper[5003]: E1206 15:32:23.712605 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.715767 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.716262 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.717659 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.718323 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.719304 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.719909 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.720577 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.721463 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.722088 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.723009 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.723548 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.724624 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.725138 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.725750 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.726743 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.727280 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.728210 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.728665 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.729221 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.730376 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.731110 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.732459 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.732908 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.733897 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.734299 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.734874 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.735872 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.736303 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" 
path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.737250 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.737732 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.738538 5003 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.738632 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.740166 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.741415 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.742054 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.744716 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.746235 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.747436 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.748832 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.750355 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.751366 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.752694 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" 
path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.754164 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.757025 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.758426 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.759833 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.761457 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.763572 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.764759 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.765383 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.765963 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.766594 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.767181 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 06 15:32:23 crc kubenswrapper[5003]: I1206 15:32:23.767683 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.585914 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.587653 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.587692 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.587704 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.587762 5003 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.594945 5003 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.595202 5003 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.596192 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.596229 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.596245 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.596265 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.596282 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:24Z","lastTransitionTime":"2025-12-06T15:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:24 crc kubenswrapper[5003]: E1206 15:32:24.615860 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI 
configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154a
fa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-
release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"010ec561-c8bb-454b-ab74-658add58caba\\\",\\\"systemUUID\\\":\\\"42b9e6db-6fd4-4e84-a5b7-176f28bfe2f1\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-12-06T15:32:24Z is after 2025-08-24T17:21:41Z"
Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.619181 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.619250 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.619274 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.619306 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.619333 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:24Z","lastTransitionTime":"2025-12-06T15:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:24 crc kubenswrapper[5003]: E1206 15:32:24.637182 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [... status patch payload identical to the first attempt, omitted ...] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:24Z is after 2025-08-24T17:21:41Z"
2025-08-24T17:21:41Z" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.641145 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.641188 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.641200 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.641218 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.641228 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:24Z","lastTransitionTime":"2025-12-06T15:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:24 crc kubenswrapper[5003]: E1206 15:32:24.657752 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"010ec561-c8bb-454b-ab74-658add58caba\\\",\\\"systemUUID\\\":\\\"42b9e6db-6fd4-4e84-a5b7-176f28bfe2f1\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:24Z is after 
2025-08-24T17:21:41Z" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.661293 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.661337 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.661354 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.661375 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.661389 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:24Z","lastTransitionTime":"2025-12-06T15:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:24 crc kubenswrapper[5003]: E1206 15:32:24.675523 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"010ec561-c8bb-454b-ab74-658add58caba\\\",\\\"systemUUID\\\":\\\"42b9e6db-6fd4-4e84-a5b7-176f28bfe2f1\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:24Z is after 
2025-08-24T17:21:41Z" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.679727 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.679781 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.679798 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.679821 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.679839 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:24Z","lastTransitionTime":"2025-12-06T15:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:24 crc kubenswrapper[5003]: E1206 15:32:24.692051 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"010ec561-c8bb-454b-ab74-658add58caba\\\",\\\"systemUUID\\\":\\\"42b9e6db-6fd4-4e84-a5b7-176f28bfe2f1\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:24Z is after 
2025-08-24T17:21:41Z" Dec 06 15:32:24 crc kubenswrapper[5003]: E1206 15:32:24.692166 5003 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.693980 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.694044 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.694068 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.694095 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.694118 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:24Z","lastTransitionTime":"2025-12-06T15:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.796737 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.796787 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.796806 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.796824 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.796833 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:24Z","lastTransitionTime":"2025-12-06T15:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.825609 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61"} Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.837785 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:24Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.847714 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:24Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.859722 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:24Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.871593 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:24Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.883145 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:24Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.895895 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:24Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.899294 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.899329 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.899340 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.899356 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.899366 5003 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:24Z","lastTransitionTime":"2025-12-06T15:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.908088 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:24Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:24 crc kubenswrapper[5003]: I1206 15:32:24.920673 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:24Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.001419 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.001464 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.001473 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.001510 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.001529 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:25Z","lastTransitionTime":"2025-12-06T15:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.104840 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.104909 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.104932 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.104958 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.104976 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:25Z","lastTransitionTime":"2025-12-06T15:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.208309 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.208360 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.208372 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.208414 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.208427 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:25Z","lastTransitionTime":"2025-12-06T15:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.312024 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.312114 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.312140 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.312165 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.312182 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:25Z","lastTransitionTime":"2025-12-06T15:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.415302 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.415357 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.415371 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.415388 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.415402 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:25Z","lastTransitionTime":"2025-12-06T15:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.415345 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.415663 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.415717 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.415756 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.415791 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:32:25 crc kubenswrapper[5003]: E1206 15:32:25.415821 5003 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object 
"openshift-network-console"/"networking-console-plugin" not registered Dec 06 15:32:25 crc kubenswrapper[5003]: E1206 15:32:25.415922 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 06 15:32:25 crc kubenswrapper[5003]: E1206 15:32:25.415959 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-06 15:32:29.415928372 +0000 UTC m=+27.949282783 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 06 15:32:25 crc kubenswrapper[5003]: E1206 15:32:25.416006 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 06 15:32:25 crc kubenswrapper[5003]: E1206 15:32:25.416031 5003 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 15:32:25 crc kubenswrapper[5003]: E1206 15:32:25.416032 5003 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 06 15:32:25 crc kubenswrapper[5003]: E1206 15:32:25.416093 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:32:29.416054695 +0000 UTC m=+27.949409256 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:32:25 crc kubenswrapper[5003]: E1206 15:32:25.416039 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 06 15:32:25 crc kubenswrapper[5003]: E1206 15:32:25.416138 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-06 15:32:29.416121037 +0000 UTC m=+27.949475528 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 15:32:25 crc kubenswrapper[5003]: E1206 15:32:25.416160 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 06 15:32:25 crc kubenswrapper[5003]: E1206 15:32:25.416165 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-06 15:32:29.416153018 +0000 UTC m=+27.949507529 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 06 15:32:25 crc kubenswrapper[5003]: E1206 15:32:25.416187 5003 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 15:32:25 crc kubenswrapper[5003]: E1206 15:32:25.416274 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-06 15:32:29.41625134 +0000 UTC m=+27.949605761 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.517927 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.517983 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.517995 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.518011 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.518024 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:25Z","lastTransitionTime":"2025-12-06T15:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.619816 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.619867 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.619879 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.619897 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.619911 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:25Z","lastTransitionTime":"2025-12-06T15:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.712183 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.712248 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.712259 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:32:25 crc kubenswrapper[5003]: E1206 15:32:25.712342 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:32:25 crc kubenswrapper[5003]: E1206 15:32:25.712449 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:32:25 crc kubenswrapper[5003]: E1206 15:32:25.712585 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.722306 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.722352 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.722363 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.722379 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.722390 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:25Z","lastTransitionTime":"2025-12-06T15:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.759046 5003 csr.go:261] certificate signing request csr-9cfjl is approved, waiting to be issued Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.773241 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-mdz5n"] Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.773527 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-mdz5n" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.776663 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.776694 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.776674 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.776856 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.777802 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-qcqkl"] Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.778003 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-qcqkl" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.785153 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.785298 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.785382 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.820087 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wqmpg\" (UniqueName: \"kubernetes.io/projected/2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1-kube-api-access-wqmpg\") pod \"node-ca-mdz5n\" (UID: \"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\") " pod="openshift-image-registry/node-ca-mdz5n" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.820131 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/956ed4a8-8918-48eb-a2f8-f35a9ae17fde-hosts-file\") pod \"node-resolver-qcqkl\" (UID: \"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\") " pod="openshift-dns/node-resolver-qcqkl" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.820171 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1-serviceca\") pod \"node-ca-mdz5n\" (UID: \"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\") " pod="openshift-image-registry/node-ca-mdz5n" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.820203 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1-host\") pod \"node-ca-mdz5n\" (UID: \"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\") " pod="openshift-image-registry/node-ca-mdz5n" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.820268 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbhrw\" (UniqueName: \"kubernetes.io/projected/956ed4a8-8918-48eb-a2f8-f35a9ae17fde-kube-api-access-fbhrw\") pod \"node-resolver-qcqkl\" (UID: 
\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\") " pod="openshift-dns/node-resolver-qcqkl" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.824270 5003 csr.go:257] certificate signing request csr-9cfjl is issued Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.824526 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.824560 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.824569 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.824583 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.824593 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:25Z","lastTransitionTime":"2025-12-06T15:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.828133 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\
\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:25Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.840449 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:25Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:25 crc kubenswrapper[5003]: I1206 15:32:25.852869 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:25Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:25.920699 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wqmpg\" (UniqueName: \"kubernetes.io/projected/2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1-kube-api-access-wqmpg\") pod \"node-ca-mdz5n\" (UID: \"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\") " pod="openshift-image-registry/node-ca-mdz5n" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:25.920738 5003 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/956ed4a8-8918-48eb-a2f8-f35a9ae17fde-hosts-file\") pod \"node-resolver-qcqkl\" (UID: \"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\") " pod="openshift-dns/node-resolver-qcqkl" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:25.920761 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1-serviceca\") pod \"node-ca-mdz5n\" (UID: \"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\") " pod="openshift-image-registry/node-ca-mdz5n" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:25.920780 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1-host\") pod \"node-ca-mdz5n\" (UID: \"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\") " pod="openshift-image-registry/node-ca-mdz5n" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:25.920794 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbhrw\" (UniqueName: \"kubernetes.io/projected/956ed4a8-8918-48eb-a2f8-f35a9ae17fde-kube-api-access-fbhrw\") pod \"node-resolver-qcqkl\" (UID: \"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\") " pod="openshift-dns/node-resolver-qcqkl" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:25.921019 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1-host\") pod \"node-ca-mdz5n\" (UID: \"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\") " pod="openshift-image-registry/node-ca-mdz5n" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:25.921063 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/956ed4a8-8918-48eb-a2f8-f35a9ae17fde-hosts-file\") pod \"node-resolver-qcqkl\" (UID: \"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\") " pod="openshift-dns/node-resolver-qcqkl" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:25.922439 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1-serviceca\") pod \"node-ca-mdz5n\" (UID: \"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\") " pod="openshift-image-registry/node-ca-mdz5n" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.016828 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbhrw\" (UniqueName: \"kubernetes.io/projected/956ed4a8-8918-48eb-a2f8-f35a9ae17fde-kube-api-access-fbhrw\") pod \"node-resolver-qcqkl\" (UID: \"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\") " pod="openshift-dns/node-resolver-qcqkl" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.024287 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wqmpg\" (UniqueName: \"kubernetes.io/projected/2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1-kube-api-access-wqmpg\") pod \"node-ca-mdz5n\" (UID: \"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\") " pod="openshift-image-registry/node-ca-mdz5n" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.036866 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.036908 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.036930 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.036945 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.036955 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:26Z","lastTransitionTime":"2025-12-06T15:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.044612 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:26Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.069442 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:26Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.089445 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-mdz5n" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.093241 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:26Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.114020 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:26Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.130538 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-qcqkl" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.145767 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.145796 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.145807 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.145823 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.145836 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:26Z","lastTransitionTime":"2025-12-06T15:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:26 crc kubenswrapper[5003]: W1206 15:32:26.170459 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod956ed4a8_8918_48eb_a2f8_f35a9ae17fde.slice/crio-c4dc4af2194813f1c1125836fcf7d8c69422f76e9febad2c87197b83c7381e43 WatchSource:0}: Error finding container c4dc4af2194813f1c1125836fcf7d8c69422f76e9febad2c87197b83c7381e43: Status 404 returned error can't find the container with id c4dc4af2194813f1c1125836fcf7d8c69422f76e9febad2c87197b83c7381e43 Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.170608 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:26Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.198254 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:26Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.238729 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qcqkl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbhrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qcqkl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:26Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.249949 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.249986 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.249995 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.250009 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.250019 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:26Z","lastTransitionTime":"2025-12-06T15:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.267664 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:26Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.280716 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:26Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.293504 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:26Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.312874 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:26Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.331936 5003 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:26Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.353858 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:26Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.354788 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.354826 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.354836 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.354853 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.354864 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:26Z","lastTransitionTime":"2025-12-06T15:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.370808 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:26Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.382519 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:26Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.398617 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:26Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.459192 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.459235 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.459244 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.459263 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.459273 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:26Z","lastTransitionTime":"2025-12-06T15:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.562095 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.562154 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.562164 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.562185 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.562200 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:26Z","lastTransitionTime":"2025-12-06T15:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.665591 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.665651 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.665665 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.665682 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.665695 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:26Z","lastTransitionTime":"2025-12-06T15:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.768198 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.768243 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.768251 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.768266 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.768276 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:26Z","lastTransitionTime":"2025-12-06T15:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.825337 5003 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-12-06 15:27:25 +0000 UTC, rotation deadline is 2026-09-18 19:31:22.613614948 +0000 UTC Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.825410 5003 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 6867h58m55.788210502s for next certificate rotation Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.843644 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-qcqkl" event={"ID":"956ed4a8-8918-48eb-a2f8-f35a9ae17fde","Type":"ContainerStarted","Data":"8ae5ee47d26422cffca5f12a4ee1455dcd34c148b1b490d69b88280b4497da11"} Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.843703 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-qcqkl" event={"ID":"956ed4a8-8918-48eb-a2f8-f35a9ae17fde","Type":"ContainerStarted","Data":"c4dc4af2194813f1c1125836fcf7d8c69422f76e9febad2c87197b83c7381e43"} Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.845162 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-mdz5n" event={"ID":"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1","Type":"ContainerStarted","Data":"cac97fc8a54c2fbd81b0abade11a987e5226084d8a4f75ddbbaad1f6c4cb9783"} Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.845201 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-mdz5n" event={"ID":"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1","Type":"ContainerStarted","Data":"e8b11fd3faf1780ffb0be7854905239b21c4eb2b4d0f273e16ab1f95ba4d5bde"} Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.869934 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.869967 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.869975 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.869988 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.869998 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:26Z","lastTransitionTime":"2025-12-06T15:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.895079 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-9kdpn"] Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.895539 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-9kdpn" Dec 06 15:32:26 crc kubenswrapper[5003]: W1206 15:32:26.899308 5003 reflector.go:561] object-"openshift-multus"/"default-dockercfg-2q5b6": failed to list *v1.Secret: secrets "default-dockercfg-2q5b6" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Dec 06 15:32:26 crc kubenswrapper[5003]: E1206 15:32:26.899356 5003 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"default-dockercfg-2q5b6\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"default-dockercfg-2q5b6\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.930528 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-cnibin\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.930569 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-host-run-k8s-cni-cncf-io\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.930600 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-host-var-lib-cni-multus\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.930616 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-multus-conf-dir\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.930828 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-host-run-multus-certs\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.930875 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-os-release\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.931067 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-multus-socket-dir-parent\") pod \"multus-9kdpn\" 
(UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.931125 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-host-run-netns\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.931161 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-host-var-lib-kubelet\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.931189 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-multus-daemon-config\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.931215 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-cni-binary-copy\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.931283 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-hostroot\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.931328 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-etc-kubernetes\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.931357 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-system-cni-dir\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.931426 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-multus-cni-dir\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.931462 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-host-var-lib-cni-bin\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " 
pod="openshift-multus/multus-9kdpn" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.931515 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46c8r\" (UniqueName: \"kubernetes.io/projected/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-kube-api-access-46c8r\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:26 crc kubenswrapper[5003]: W1206 15:32:26.935282 5003 reflector.go:561] object-"openshift-multus"/"multus-daemon-config": failed to list *v1.ConfigMap: configmaps "multus-daemon-config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Dec 06 15:32:26 crc kubenswrapper[5003]: E1206 15:32:26.935337 5003 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"multus-daemon-config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"multus-daemon-config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 06 15:32:26 crc kubenswrapper[5003]: W1206 15:32:26.935601 5003 reflector.go:561] object-"openshift-multus"/"cni-copy-resources": failed to list *v1.ConfigMap: configmaps "cni-copy-resources" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Dec 06 15:32:26 crc kubenswrapper[5003]: E1206 15:32:26.935637 5003 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"cni-copy-resources\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"cni-copy-resources\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 06 15:32:26 crc kubenswrapper[5003]: W1206 15:32:26.955340 5003 reflector.go:561] object-"openshift-multus"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Dec 06 15:32:26 crc kubenswrapper[5003]: E1206 15:32:26.955395 5003 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.957278 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.963315 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-w25db"] Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.963849 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-w25db" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.965254 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-j4rf7"] Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.965943 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-p7xwd"] Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.966656 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.967073 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.971821 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.971864 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.971881 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.971902 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.971917 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:26Z","lastTransitionTime":"2025-12-06T15:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.979259 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.979280 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.979364 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.979969 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.980666 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:26Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.981352 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.981498 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.981622 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.981742 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.981830 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.981925 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 06 15:32:26 crc kubenswrapper[5003]: I1206 15:32:26.984794 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.006480 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.006483 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.006511 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.036752 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8a695d94-271c-45bc-8a89-dfdecb57ec00-ovn-node-metrics-cert\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.036793 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-os-release\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " 
pod="openshift-multus/multus-9kdpn" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.036815 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-multus-socket-dir-parent\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.036832 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-multus-conf-dir\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.036854 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/7dc41f9e-e763-4a9a-a064-f65bc24332b9-system-cni-dir\") pod \"multus-additional-cni-plugins-j4rf7\" (UID: \"7dc41f9e-e763-4a9a-a064-f65bc24332b9\") " pod="openshift-multus/multus-additional-cni-plugins-j4rf7" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.036873 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/7dc41f9e-e763-4a9a-a064-f65bc24332b9-cnibin\") pod \"multus-additional-cni-plugins-j4rf7\" (UID: \"7dc41f9e-e763-4a9a-a064-f65bc24332b9\") " pod="openshift-multus/multus-additional-cni-plugins-j4rf7" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.036891 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qltc5\" (UniqueName: \"kubernetes.io/projected/7dc41f9e-e763-4a9a-a064-f65bc24332b9-kube-api-access-qltc5\") pod \"multus-additional-cni-plugins-j4rf7\" (UID: \"7dc41f9e-e763-4a9a-a064-f65bc24332b9\") " pod="openshift-multus/multus-additional-cni-plugins-j4rf7" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.036909 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-log-socket\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.036928 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-system-cni-dir\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.036945 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-cni-binary-copy\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.036961 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-hostroot\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 
15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.036977 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-cni-netd\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.036996 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037016 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/7dc41f9e-e763-4a9a-a064-f65bc24332b9-os-release\") pod \"multus-additional-cni-plugins-j4rf7\" (UID: \"7dc41f9e-e763-4a9a-a064-f65bc24332b9\") " pod="openshift-multus/multus-additional-cni-plugins-j4rf7" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037032 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-node-log\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037059 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8a695d94-271c-45bc-8a89-dfdecb57ec00-env-overrides\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037083 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-kubelet\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037103 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-host-var-lib-cni-bin\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037122 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8a695d94-271c-45bc-8a89-dfdecb57ec00-ovnkube-script-lib\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037140 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-cnibin\") pod \"multus-9kdpn\" (UID: 
\"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037158 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1a047c4d-003e-4668-9b96-945eab34ab68-proxy-tls\") pod \"machine-config-daemon-w25db\" (UID: \"1a047c4d-003e-4668-9b96-945eab34ab68\") " pod="openshift-machine-config-operator/machine-config-daemon-w25db" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037184 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-host-var-lib-cni-multus\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037201 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-host-run-multus-certs\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037219 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-etc-openvswitch\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037236 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-run-ovn-kubernetes\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037256 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-multus-daemon-config\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037274 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5gxl\" (UniqueName: \"kubernetes.io/projected/8a695d94-271c-45bc-8a89-dfdecb57ec00-kube-api-access-k5gxl\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037292 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-host-run-netns\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037310 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-host-var-lib-kubelet\") pod \"multus-9kdpn\" (UID: 
\"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037327 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-etc-kubernetes\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037345 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/7dc41f9e-e763-4a9a-a064-f65bc24332b9-tuning-conf-dir\") pod \"multus-additional-cni-plugins-j4rf7\" (UID: \"7dc41f9e-e763-4a9a-a064-f65bc24332b9\") " pod="openshift-multus/multus-additional-cni-plugins-j4rf7" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037363 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-multus-cni-dir\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037380 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-run-systemd\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037396 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-var-lib-openvswitch\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037414 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/1a047c4d-003e-4668-9b96-945eab34ab68-rootfs\") pod \"machine-config-daemon-w25db\" (UID: \"1a047c4d-003e-4668-9b96-945eab34ab68\") " pod="openshift-machine-config-operator/machine-config-daemon-w25db" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037431 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46c8r\" (UniqueName: \"kubernetes.io/projected/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-kube-api-access-46c8r\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037449 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7dc41f9e-e763-4a9a-a064-f65bc24332b9-cni-binary-copy\") pod \"multus-additional-cni-plugins-j4rf7\" (UID: \"7dc41f9e-e763-4a9a-a064-f65bc24332b9\") " pod="openshift-multus/multus-additional-cni-plugins-j4rf7" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037467 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-run-openvswitch\") pod 
\"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037484 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1a047c4d-003e-4668-9b96-945eab34ab68-mcd-auth-proxy-config\") pod \"machine-config-daemon-w25db\" (UID: \"1a047c4d-003e-4668-9b96-945eab34ab68\") " pod="openshift-machine-config-operator/machine-config-daemon-w25db" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037525 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-host-run-k8s-cni-cncf-io\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037544 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-systemd-units\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037561 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-slash\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037581 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-cni-bin\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037597 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8a695d94-271c-45bc-8a89-dfdecb57ec00-ovnkube-config\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037615 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dn2k\" (UniqueName: \"kubernetes.io/projected/1a047c4d-003e-4668-9b96-945eab34ab68-kube-api-access-6dn2k\") pod \"machine-config-daemon-w25db\" (UID: \"1a047c4d-003e-4668-9b96-945eab34ab68\") " pod="openshift-machine-config-operator/machine-config-daemon-w25db" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037633 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7dc41f9e-e763-4a9a-a064-f65bc24332b9-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-j4rf7\" (UID: \"7dc41f9e-e763-4a9a-a064-f65bc24332b9\") " pod="openshift-multus/multus-additional-cni-plugins-j4rf7" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037650 5003 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-run-netns\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037666 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-run-ovn\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037775 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-os-release\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037817 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-multus-socket-dir-parent\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037841 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-multus-conf-dir\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.037964 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-system-cni-dir\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.038053 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-hostroot\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.038096 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-host-var-lib-cni-bin\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.038132 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-cnibin\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.038159 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-host-var-lib-cni-multus\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " 
pod="openshift-multus/multus-9kdpn" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.038180 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-host-run-multus-certs\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.038233 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-host-run-netns\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.038265 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-host-var-lib-kubelet\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.038288 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-etc-kubernetes\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.038408 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-multus-cni-dir\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.038556 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-host-run-k8s-cni-cncf-io\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.074105 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.074141 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.074176 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.074192 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.074203 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:27Z","lastTransitionTime":"2025-12-06T15:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.130666 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:27Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.138981 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-run-systemd\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.139033 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-var-lib-openvswitch\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.139057 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7dc41f9e-e763-4a9a-a064-f65bc24332b9-cni-binary-copy\") pod \"multus-additional-cni-plugins-j4rf7\" (UID: \"7dc41f9e-e763-4a9a-a064-f65bc24332b9\") " pod="openshift-multus/multus-additional-cni-plugins-j4rf7" Dec 06 15:32:27 crc 
kubenswrapper[5003]: I1206 15:32:27.139078 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-run-openvswitch\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.139099 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/1a047c4d-003e-4668-9b96-945eab34ab68-rootfs\") pod \"machine-config-daemon-w25db\" (UID: \"1a047c4d-003e-4668-9b96-945eab34ab68\") " pod="openshift-machine-config-operator/machine-config-daemon-w25db" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.139127 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-slash\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.139145 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-cni-bin\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.139168 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1a047c4d-003e-4668-9b96-945eab34ab68-mcd-auth-proxy-config\") pod \"machine-config-daemon-w25db\" (UID: \"1a047c4d-003e-4668-9b96-945eab34ab68\") " pod="openshift-machine-config-operator/machine-config-daemon-w25db" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.139200 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-systemd-units\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.139220 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-run-netns\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.139240 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-run-ovn\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.139262 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8a695d94-271c-45bc-8a89-dfdecb57ec00-ovnkube-config\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.139282 5003 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dn2k\" (UniqueName: \"kubernetes.io/projected/1a047c4d-003e-4668-9b96-945eab34ab68-kube-api-access-6dn2k\") pod \"machine-config-daemon-w25db\" (UID: \"1a047c4d-003e-4668-9b96-945eab34ab68\") " pod="openshift-machine-config-operator/machine-config-daemon-w25db" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.139304 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7dc41f9e-e763-4a9a-a064-f65bc24332b9-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-j4rf7\" (UID: \"7dc41f9e-e763-4a9a-a064-f65bc24332b9\") " pod="openshift-multus/multus-additional-cni-plugins-j4rf7" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.139325 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/7dc41f9e-e763-4a9a-a064-f65bc24332b9-system-cni-dir\") pod \"multus-additional-cni-plugins-j4rf7\" (UID: \"7dc41f9e-e763-4a9a-a064-f65bc24332b9\") " pod="openshift-multus/multus-additional-cni-plugins-j4rf7" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.139346 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8a695d94-271c-45bc-8a89-dfdecb57ec00-ovn-node-metrics-cert\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.139377 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/7dc41f9e-e763-4a9a-a064-f65bc24332b9-cnibin\") pod \"multus-additional-cni-plugins-j4rf7\" (UID: \"7dc41f9e-e763-4a9a-a064-f65bc24332b9\") " pod="openshift-multus/multus-additional-cni-plugins-j4rf7" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.139399 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qltc5\" (UniqueName: \"kubernetes.io/projected/7dc41f9e-e763-4a9a-a064-f65bc24332b9-kube-api-access-qltc5\") pod \"multus-additional-cni-plugins-j4rf7\" (UID: \"7dc41f9e-e763-4a9a-a064-f65bc24332b9\") " pod="openshift-multus/multus-additional-cni-plugins-j4rf7" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.139421 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-log-socket\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.139443 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-cni-netd\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.139466 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.139518 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/7dc41f9e-e763-4a9a-a064-f65bc24332b9-os-release\") pod \"multus-additional-cni-plugins-j4rf7\" (UID: \"7dc41f9e-e763-4a9a-a064-f65bc24332b9\") " pod="openshift-multus/multus-additional-cni-plugins-j4rf7" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.139538 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-node-log\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.139556 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8a695d94-271c-45bc-8a89-dfdecb57ec00-env-overrides\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.139582 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-kubelet\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.139600 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8a695d94-271c-45bc-8a89-dfdecb57ec00-ovnkube-script-lib\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.139621 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1a047c4d-003e-4668-9b96-945eab34ab68-proxy-tls\") pod \"machine-config-daemon-w25db\" (UID: \"1a047c4d-003e-4668-9b96-945eab34ab68\") " pod="openshift-machine-config-operator/machine-config-daemon-w25db" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.139641 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-etc-openvswitch\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.139663 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-run-ovn-kubernetes\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.139700 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5gxl\" (UniqueName: \"kubernetes.io/projected/8a695d94-271c-45bc-8a89-dfdecb57ec00-kube-api-access-k5gxl\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.139723 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/7dc41f9e-e763-4a9a-a064-f65bc24332b9-tuning-conf-dir\") pod \"multus-additional-cni-plugins-j4rf7\" (UID: \"7dc41f9e-e763-4a9a-a064-f65bc24332b9\") " pod="openshift-multus/multus-additional-cni-plugins-j4rf7" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.139897 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/7dc41f9e-e763-4a9a-a064-f65bc24332b9-system-cni-dir\") pod \"multus-additional-cni-plugins-j4rf7\" (UID: \"7dc41f9e-e763-4a9a-a064-f65bc24332b9\") " pod="openshift-multus/multus-additional-cni-plugins-j4rf7" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.139951 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-cni-bin\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.139966 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-node-log\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.140002 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-run-systemd\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.140037 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-var-lib-openvswitch\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.140051 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-cni-netd\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.140100 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.140101 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-run-openvswitch\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 
crc kubenswrapper[5003]: I1206 15:32:27.140097 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-log-socket\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.140168 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-run-ovn\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.140145 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-slash\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.140173 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/7dc41f9e-e763-4a9a-a064-f65bc24332b9-os-release\") pod \"multus-additional-cni-plugins-j4rf7\" (UID: \"7dc41f9e-e763-4a9a-a064-f65bc24332b9\") " pod="openshift-multus/multus-additional-cni-plugins-j4rf7" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.140237 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-systemd-units\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.140268 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-run-netns\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.140298 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-kubelet\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.140958 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8a695d94-271c-45bc-8a89-dfdecb57ec00-env-overrides\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.141234 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8a695d94-271c-45bc-8a89-dfdecb57ec00-ovnkube-script-lib\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.141593 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: 
\"kubernetes.io/host-path/7dc41f9e-e763-4a9a-a064-f65bc24332b9-cnibin\") pod \"multus-additional-cni-plugins-j4rf7\" (UID: \"7dc41f9e-e763-4a9a-a064-f65bc24332b9\") " pod="openshift-multus/multus-additional-cni-plugins-j4rf7" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.140126 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/1a047c4d-003e-4668-9b96-945eab34ab68-rootfs\") pod \"machine-config-daemon-w25db\" (UID: \"1a047c4d-003e-4668-9b96-945eab34ab68\") " pod="openshift-machine-config-operator/machine-config-daemon-w25db" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.141737 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8a695d94-271c-45bc-8a89-dfdecb57ec00-ovnkube-config\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.141979 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-run-ovn-kubernetes\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.142012 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-etc-openvswitch\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.142471 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7dc41f9e-e763-4a9a-a064-f65bc24332b9-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-j4rf7\" (UID: \"7dc41f9e-e763-4a9a-a064-f65bc24332b9\") " pod="openshift-multus/multus-additional-cni-plugins-j4rf7" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.146691 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/7dc41f9e-e763-4a9a-a064-f65bc24332b9-tuning-conf-dir\") pod \"multus-additional-cni-plugins-j4rf7\" (UID: \"7dc41f9e-e763-4a9a-a064-f65bc24332b9\") " pod="openshift-multus/multus-additional-cni-plugins-j4rf7" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.147179 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1a047c4d-003e-4668-9b96-945eab34ab68-mcd-auth-proxy-config\") pod \"machine-config-daemon-w25db\" (UID: \"1a047c4d-003e-4668-9b96-945eab34ab68\") " pod="openshift-machine-config-operator/machine-config-daemon-w25db" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.153237 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8a695d94-271c-45bc-8a89-dfdecb57ec00-ovn-node-metrics-cert\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.159966 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/1a047c4d-003e-4668-9b96-945eab34ab68-proxy-tls\") pod \"machine-config-daemon-w25db\" (UID: \"1a047c4d-003e-4668-9b96-945eab34ab68\") " pod="openshift-machine-config-operator/machine-config-daemon-w25db" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.176555 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.176602 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.176618 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.176638 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.176651 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:27Z","lastTransitionTime":"2025-12-06T15:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.180430 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5gxl\" (UniqueName: \"kubernetes.io/projected/8a695d94-271c-45bc-8a89-dfdecb57ec00-kube-api-access-k5gxl\") pod \"ovnkube-node-p7xwd\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.186995 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dn2k\" (UniqueName: \"kubernetes.io/projected/1a047c4d-003e-4668-9b96-945eab34ab68-kube-api-access-6dn2k\") pod \"machine-config-daemon-w25db\" (UID: \"1a047c4d-003e-4668-9b96-945eab34ab68\") " pod="openshift-machine-config-operator/machine-config-daemon-w25db" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.188049 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:27Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.243764 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:27Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.268795 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:27Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.278258 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-w25db" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.278719 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.278755 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.278764 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.278779 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.278791 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:27Z","lastTransitionTime":"2025-12-06T15:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.288709 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:27 crc kubenswrapper[5003]: W1206 15:32:27.290518 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1a047c4d_003e_4668_9b96_945eab34ab68.slice/crio-b3d5483d30568980a64a8457d9132b407c61c578a91ed900c02c5064b2608d27 WatchSource:0}: Error finding container b3d5483d30568980a64a8457d9132b407c61c578a91ed900c02c5064b2608d27: Status 404 returned error can't find the container with id b3d5483d30568980a64a8457d9132b407c61c578a91ed900c02c5064b2608d27 Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.292262 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:27Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:27 crc kubenswrapper[5003]: W1206 15:32:27.303726 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8a695d94_271c_45bc_8a89_dfdecb57ec00.slice/crio-aabc580923acc64fe2a5e1d57e615c5b8f2423b6a65be317d0e9d742a1cbe295 WatchSource:0}: Error finding container aabc580923acc64fe2a5e1d57e615c5b8f2423b6a65be317d0e9d742a1cbe295: Status 404 returned error can't find the container with id aabc580923acc64fe2a5e1d57e615c5b8f2423b6a65be317d0e9d742a1cbe295 Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.305692 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:27Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.323017 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:27Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.344143 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:27Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.355511 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qcqkl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ae5ee47d26422cffca5f12a4ee1455dcd34c148b1b490d69b88280b4497da11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbhrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qcqkl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:27Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.369012 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:27Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.380392 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.380426 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.380434 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.380450 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.380460 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:27Z","lastTransitionTime":"2025-12-06T15:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.412712 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cac97fc8a54c2fbd81b0abade11a987e5226084d8a4f75ddbbaad1f6c4cb9783\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:27Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.436138 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a047c4d-003e-4668-9b96-945eab34ab68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w25db\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:27Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.458093 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:27Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.476031 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dc41f9e-e763-4a9a-a064-f65bc24332b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-j4rf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:27Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.482929 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.482969 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.482980 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.482998 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.483009 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:27Z","lastTransitionTime":"2025-12-06T15:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.497335 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:27Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.509179 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:27Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.522130 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:27Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.534409 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:27Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.546309 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:27Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.571109 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a695d94-271c-45bc-8a89-dfdecb57ec00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"
},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":
\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7xwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:27Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.587699 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:27Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.587893 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.587906 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.587913 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.587925 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.587934 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:27Z","lastTransitionTime":"2025-12-06T15:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.596935 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qcqkl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ae5ee47d26422cffca5f12a4ee1455dcd34c148b1b490d69b88280b4497da11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbhrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qcqkl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:27Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.607746 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9kdpn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46c8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9kdpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:27Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.690110 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.690157 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.690169 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.690184 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.690195 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:27Z","lastTransitionTime":"2025-12-06T15:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.712356 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.712426 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.712437 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:32:27 crc kubenswrapper[5003]: E1206 15:32:27.712550 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:32:27 crc kubenswrapper[5003]: E1206 15:32:27.712628 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:32:27 crc kubenswrapper[5003]: E1206 15:32:27.712722 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.792718 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.792759 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.792769 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.792787 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.792797 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:27Z","lastTransitionTime":"2025-12-06T15:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.850048 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" event={"ID":"1a047c4d-003e-4668-9b96-945eab34ab68","Type":"ContainerStarted","Data":"4b3a6db6c2c8e6a283f7b46ef28ba7310d3cfc7054cfd8cbcc290ebd0d26dcf4"} Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.850131 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" event={"ID":"1a047c4d-003e-4668-9b96-945eab34ab68","Type":"ContainerStarted","Data":"b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba"} Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.850159 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" event={"ID":"1a047c4d-003e-4668-9b96-945eab34ab68","Type":"ContainerStarted","Data":"b3d5483d30568980a64a8457d9132b407c61c578a91ed900c02c5064b2608d27"} Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.851724 5003 generic.go:334] "Generic (PLEG): container finished" podID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerID="9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9" exitCode=0 Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.851760 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" event={"ID":"8a695d94-271c-45bc-8a89-dfdecb57ec00","Type":"ContainerDied","Data":"9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9"} Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.851812 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" event={"ID":"8a695d94-271c-45bc-8a89-dfdecb57ec00","Type":"ContainerStarted","Data":"aabc580923acc64fe2a5e1d57e615c5b8f2423b6a65be317d0e9d742a1cbe295"} Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.869915 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a695d94-271c-45bc-8a89-dfdecb57ec00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7xwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:27Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.886295 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:27Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.895057 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.895087 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.895096 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.895110 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.895120 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:27Z","lastTransitionTime":"2025-12-06T15:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.900657 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:27Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.912841 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qcqkl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ae5ee47d26422cffca5f12a4ee1455dcd34c148b1b490d69b88280b4497da11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbhrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qcqkl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:27Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.921290 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.923541 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9kdpn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46c8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9kdpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:27Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.930357 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-multus-daemon-config\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.934335 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:27Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.942703 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cac97fc8a54c2fbd81b0abade11a987e5226084d8a4f75ddbbaad1f6c4cb9783\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:27Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.959361 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a047c4d-003e-4668-9b96-945eab34ab68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b3a6db6c2c8e6a283f7b46ef28ba7310d3cfc7054cfd8cbcc290ebd0d26dcf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w25db\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:27Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.975810 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:27Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.988549 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:27Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.998128 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.998156 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.998164 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.998177 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:27 crc kubenswrapper[5003]: I1206 15:32:27.998185 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:27Z","lastTransitionTime":"2025-12-06T15:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.014225 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:28Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.026948 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:28Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:28 crc kubenswrapper[5003]: E1206 15:32:28.038678 5003 configmap.go:193] Couldn't get configMap openshift-multus/cni-copy-resources: failed to sync configmap cache: timed out waiting for the condition Dec 06 15:32:28 crc kubenswrapper[5003]: E1206 15:32:28.038760 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-cni-binary-copy podName:350e8b9a-b7bf-4dc9-abe9-d10f7a088be3 nodeName:}" 
failed. No retries permitted until 2025-12-06 15:32:28.538739433 +0000 UTC m=+27.072093814 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cni-binary-copy" (UniqueName: "kubernetes.io/configmap/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-cni-binary-copy") pod "multus-9kdpn" (UID: "350e8b9a-b7bf-4dc9-abe9-d10f7a088be3") : failed to sync configmap cache: timed out waiting for the condition Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.040863 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dc41f9e-e763-4a9a-a064-f65bc24332b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-j4rf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:28Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.052785 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:28Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.064604 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:28Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.075870 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:28Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.088226 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:28Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.099814 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:28Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.100333 5003 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.100371 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.100380 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.100395 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.100404 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:28Z","lastTransitionTime":"2025-12-06T15:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.112944 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dc41f9e-e763-4a9a-a064-f65bc24332b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-j4rf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:28Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.122926 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:28Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.133046 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:28Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:28 crc kubenswrapper[5003]: E1206 15:32:28.140642 5003 configmap.go:193] Couldn't get configMap openshift-multus/cni-copy-resources: failed to sync configmap cache: timed out waiting for the condition Dec 06 15:32:28 crc kubenswrapper[5003]: E1206 15:32:28.140716 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7dc41f9e-e763-4a9a-a064-f65bc24332b9-cni-binary-copy podName:7dc41f9e-e763-4a9a-a064-f65bc24332b9 nodeName:}" failed. No retries permitted until 2025-12-06 15:32:28.640699203 +0000 UTC m=+27.174053584 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cni-binary-copy" (UniqueName: "kubernetes.io/configmap/7dc41f9e-e763-4a9a-a064-f65bc24332b9-cni-binary-copy") pod "multus-additional-cni-plugins-j4rf7" (UID: "7dc41f9e-e763-4a9a-a064-f65bc24332b9") : failed to sync configmap cache: timed out waiting for the condition Dec 06 15:32:28 crc kubenswrapper[5003]: E1206 15:32:28.147595 5003 projected.go:288] Couldn't get configMap openshift-multus/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Dec 06 15:32:28 crc kubenswrapper[5003]: E1206 15:32:28.147632 5003 projected.go:194] Error preparing data for projected volume kube-api-access-46c8r for pod openshift-multus/multus-9kdpn: failed to sync configmap cache: timed out waiting for the condition Dec 06 15:32:28 crc kubenswrapper[5003]: E1206 15:32:28.147671 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-kube-api-access-46c8r podName:350e8b9a-b7bf-4dc9-abe9-d10f7a088be3 nodeName:}" failed. No retries permitted until 2025-12-06 15:32:28.647658893 +0000 UTC m=+27.181013274 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-46c8r" (UniqueName: "kubernetes.io/projected/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-kube-api-access-46c8r") pod "multus-9kdpn" (UID: "350e8b9a-b7bf-4dc9-abe9-d10f7a088be3") : failed to sync configmap cache: timed out waiting for the condition Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.152853 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a695d94-271c-45bc-8a89-dfdecb57ec00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7xwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:28Z 
is after 2025-08-24T17:21:41Z" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.166613 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:28Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.177394 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qcqkl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ae5ee47d26422cffca5f12a4ee1455dcd34c148b1b490d69b88280b4497da11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbhrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qcqkl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:28Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.196988 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9kdpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46c8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9kdpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:28Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:28 crc kubenswrapper[5003]: E1206 15:32:28.199668 5003 projected.go:288] Couldn't get configMap openshift-multus/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Dec 06 15:32:28 crc kubenswrapper[5003]: E1206 15:32:28.199720 5003 projected.go:194] Error preparing data for projected volume kube-api-access-qltc5 for pod openshift-multus/multus-additional-cni-plugins-j4rf7: failed to sync configmap cache: timed out waiting for the condition Dec 06 15:32:28 crc kubenswrapper[5003]: E1206 15:32:28.199783 5003 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/7dc41f9e-e763-4a9a-a064-f65bc24332b9-kube-api-access-qltc5 podName:7dc41f9e-e763-4a9a-a064-f65bc24332b9 nodeName:}" failed. No retries permitted until 2025-12-06 15:32:28.699764274 +0000 UTC m=+27.233118655 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-qltc5" (UniqueName: "kubernetes.io/projected/7dc41f9e-e763-4a9a-a064-f65bc24332b9-kube-api-access-qltc5") pod "multus-additional-cni-plugins-j4rf7" (UID: "7dc41f9e-e763-4a9a-a064-f65bc24332b9") : failed to sync configmap cache: timed out waiting for the condition Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.203186 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.203234 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.203251 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.203275 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.203293 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:28Z","lastTransitionTime":"2025-12-06T15:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.206074 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.213753 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:28Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.227072 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cac97fc8a54c2fbd81b0abade11a987e5226084d8a4f75ddbbaad1f6c4cb9783\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:28Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.240116 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a047c4d-003e-4668-9b96-945eab34ab68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b3a6db6c2c8e6a283f7b46ef28ba7310d3cfc7054cfd8cbcc290ebd0d26dcf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-w25db\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:28Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.251291 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.284153 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.306430 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.306515 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.306536 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.306563 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.306583 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:28Z","lastTransitionTime":"2025-12-06T15:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.408626 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.409107 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.409127 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.409149 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.409163 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:28Z","lastTransitionTime":"2025-12-06T15:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.511780 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.511829 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.511840 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.511857 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.511871 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:28Z","lastTransitionTime":"2025-12-06T15:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.555655 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-cni-binary-copy\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.556268 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-cni-binary-copy\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.613629 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.613669 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.613681 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.613698 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.613709 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:28Z","lastTransitionTime":"2025-12-06T15:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.656876 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46c8r\" (UniqueName: \"kubernetes.io/projected/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-kube-api-access-46c8r\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.656923 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7dc41f9e-e763-4a9a-a064-f65bc24332b9-cni-binary-copy\") pod \"multus-additional-cni-plugins-j4rf7\" (UID: \"7dc41f9e-e763-4a9a-a064-f65bc24332b9\") " pod="openshift-multus/multus-additional-cni-plugins-j4rf7" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.657587 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7dc41f9e-e763-4a9a-a064-f65bc24332b9-cni-binary-copy\") pod \"multus-additional-cni-plugins-j4rf7\" (UID: \"7dc41f9e-e763-4a9a-a064-f65bc24332b9\") " pod="openshift-multus/multus-additional-cni-plugins-j4rf7" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.663214 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46c8r\" (UniqueName: \"kubernetes.io/projected/350e8b9a-b7bf-4dc9-abe9-d10f7a088be3-kube-api-access-46c8r\") pod \"multus-9kdpn\" (UID: \"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\") " pod="openshift-multus/multus-9kdpn" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.709650 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-9kdpn" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.716940 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.716976 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.716988 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.717003 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.717015 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:28Z","lastTransitionTime":"2025-12-06T15:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:28 crc kubenswrapper[5003]: W1206 15:32:28.730127 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod350e8b9a_b7bf_4dc9_abe9_d10f7a088be3.slice/crio-f467a6dc00238bb5256a9769ef7dec2eef9cc743fdf3f6814839170b35dd3178 WatchSource:0}: Error finding container f467a6dc00238bb5256a9769ef7dec2eef9cc743fdf3f6814839170b35dd3178: Status 404 returned error can't find the container with id f467a6dc00238bb5256a9769ef7dec2eef9cc743fdf3f6814839170b35dd3178 Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.757638 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qltc5\" (UniqueName: \"kubernetes.io/projected/7dc41f9e-e763-4a9a-a064-f65bc24332b9-kube-api-access-qltc5\") pod \"multus-additional-cni-plugins-j4rf7\" (UID: \"7dc41f9e-e763-4a9a-a064-f65bc24332b9\") " pod="openshift-multus/multus-additional-cni-plugins-j4rf7" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.761872 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qltc5\" (UniqueName: \"kubernetes.io/projected/7dc41f9e-e763-4a9a-a064-f65bc24332b9-kube-api-access-qltc5\") pod \"multus-additional-cni-plugins-j4rf7\" (UID: \"7dc41f9e-e763-4a9a-a064-f65bc24332b9\") " pod="openshift-multus/multus-additional-cni-plugins-j4rf7" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.792603 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" Dec 06 15:32:28 crc kubenswrapper[5003]: W1206 15:32:28.810078 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7dc41f9e_e763_4a9a_a064_f65bc24332b9.slice/crio-d6d70a8f52d2aac01bee5d9139af02645f80507a478976ff793d8c45a0e12320 WatchSource:0}: Error finding container d6d70a8f52d2aac01bee5d9139af02645f80507a478976ff793d8c45a0e12320: Status 404 returned error can't find the container with id d6d70a8f52d2aac01bee5d9139af02645f80507a478976ff793d8c45a0e12320 Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.820265 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.820311 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.820324 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.820342 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.820711 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:28Z","lastTransitionTime":"2025-12-06T15:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.855222 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" event={"ID":"7dc41f9e-e763-4a9a-a064-f65bc24332b9","Type":"ContainerStarted","Data":"d6d70a8f52d2aac01bee5d9139af02645f80507a478976ff793d8c45a0e12320"} Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.856762 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-9kdpn" event={"ID":"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3","Type":"ContainerStarted","Data":"e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661"} Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.856800 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-9kdpn" event={"ID":"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3","Type":"ContainerStarted","Data":"f467a6dc00238bb5256a9769ef7dec2eef9cc743fdf3f6814839170b35dd3178"} Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.859885 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" event={"ID":"8a695d94-271c-45bc-8a89-dfdecb57ec00","Type":"ContainerStarted","Data":"e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e"} Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.859913 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" event={"ID":"8a695d94-271c-45bc-8a89-dfdecb57ec00","Type":"ContainerStarted","Data":"b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2"} Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.859922 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" event={"ID":"8a695d94-271c-45bc-8a89-dfdecb57ec00","Type":"ContainerStarted","Data":"b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6"} Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.859930 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" event={"ID":"8a695d94-271c-45bc-8a89-dfdecb57ec00","Type":"ContainerStarted","Data":"b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119"} Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.859938 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" event={"ID":"8a695d94-271c-45bc-8a89-dfdecb57ec00","Type":"ContainerStarted","Data":"276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9"} Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.869085 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:28Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.881116 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cac97fc8a54c2fbd81b0abade11a987e5226084d8a4f75ddbbaad1f6c4cb9783\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:28Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.891536 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a047c4d-003e-4668-9b96-945eab34ab68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b3a6db6c2c8e6a283f7b46ef28ba7310d3cfc7054cfd8cbcc290ebd0d26dcf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\
\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w25db\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:28Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.923428 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.923456 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.923465 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.923477 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.923505 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:28Z","lastTransitionTime":"2025-12-06T15:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.927270 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dc41f9e-e763-4a9a-a064-f65bc24332b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\
\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastSt
ate\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-j4rf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:28Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:28 crc kubenswrapper[5003]: I1206 15:32:28.991559 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:28Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.012771 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:29Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.027248 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:29Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.027414 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.027443 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.027452 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.027469 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.027501 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:29Z","lastTransitionTime":"2025-12-06T15:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.043352 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa4
1ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:29Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.054562 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:29Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.068400 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:29Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.100126 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a695d94-271c-45bc-8a89-dfdecb57ec00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7xwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:29Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.112061 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:29Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.123482 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qcqkl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ae5ee47d26422cffca5f12a4ee1455dcd34c148b1b490d69b88280b4497da11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbhrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qcqkl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-06T15:32:29Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.129888 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.129932 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.129940 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.129954 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.129963 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:29Z","lastTransitionTime":"2025-12-06T15:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.136186 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9kdpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":
\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46c8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9kdpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:29Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.232310 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.232342 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.232351 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.232365 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.232376 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:29Z","lastTransitionTime":"2025-12-06T15:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.335111 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.335168 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.335193 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.335222 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.335244 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:29Z","lastTransitionTime":"2025-12-06T15:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.437056 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.437380 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.437394 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.437417 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.437428 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:29Z","lastTransitionTime":"2025-12-06T15:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.464143 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.464267 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.464312 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.464433 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:32:29 crc kubenswrapper[5003]: E1206 15:32:29.464471 5003 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 06 15:32:29 crc kubenswrapper[5003]: E1206 15:32:29.464554 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-06 15:32:37.464536224 +0000 UTC m=+35.997890605 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 06 15:32:29 crc kubenswrapper[5003]: E1206 15:32:29.464588 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.464474 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:32:29 crc kubenswrapper[5003]: E1206 15:32:29.464611 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 06 15:32:29 crc kubenswrapper[5003]: E1206 15:32:29.464623 5003 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 15:32:29 crc kubenswrapper[5003]: E1206 15:32:29.464589 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 06 15:32:29 crc kubenswrapper[5003]: E1206 15:32:29.464662 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 06 15:32:29 crc kubenswrapper[5003]: E1206 15:32:29.464674 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-06 15:32:37.464657387 +0000 UTC m=+35.998011858 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 15:32:29 crc kubenswrapper[5003]: E1206 15:32:29.464677 5003 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 15:32:29 crc kubenswrapper[5003]: E1206 15:32:29.464711 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-06 15:32:37.464703248 +0000 UTC m=+35.998057769 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 15:32:29 crc kubenswrapper[5003]: E1206 15:32:29.464619 5003 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 06 15:32:29 crc kubenswrapper[5003]: E1206 15:32:29.464747 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-06 15:32:37.464740039 +0000 UTC m=+35.998094550 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 06 15:32:29 crc kubenswrapper[5003]: E1206 15:32:29.464849 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:32:37.464839042 +0000 UTC m=+35.998193423 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.541738 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.541777 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.541815 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.541832 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.541892 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:29Z","lastTransitionTime":"2025-12-06T15:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.645111 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.645152 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.645163 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.645181 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.645194 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:29Z","lastTransitionTime":"2025-12-06T15:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.712172 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.712284 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:32:29 crc kubenswrapper[5003]: E1206 15:32:29.712321 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.712398 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:32:29 crc kubenswrapper[5003]: E1206 15:32:29.712474 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:32:29 crc kubenswrapper[5003]: E1206 15:32:29.712623 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.748423 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.748525 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.748545 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.748569 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.748589 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:29Z","lastTransitionTime":"2025-12-06T15:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.852346 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.852407 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.852424 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.852448 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.852465 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:29Z","lastTransitionTime":"2025-12-06T15:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.867203 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" event={"ID":"8a695d94-271c-45bc-8a89-dfdecb57ec00","Type":"ContainerStarted","Data":"5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6"} Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.868831 5003 generic.go:334] "Generic (PLEG): container finished" podID="7dc41f9e-e763-4a9a-a064-f65bc24332b9" containerID="79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f" exitCode=0 Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.868856 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" event={"ID":"7dc41f9e-e763-4a9a-a064-f65bc24332b9","Type":"ContainerDied","Data":"79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f"} Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.881455 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the 
pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:29Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.897655 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:29Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.920194 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a695d94-271c-45bc-8a89-dfdecb57ec00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7xwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:29Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.931254 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:29Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.944770 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qcqkl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ae5ee47d26422cffca5f12a4ee1455dcd34c148b1b490d69b88280b4497da11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbhrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qcqkl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-06T15:32:29Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.956346 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.956388 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.956397 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.956412 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.956421 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:29Z","lastTransitionTime":"2025-12-06T15:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.962327 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9kdpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":
\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46c8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9kdpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:29Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.976006 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:29Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:29 crc kubenswrapper[5003]: I1206 15:32:29.993520 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cac97fc8a54c2fbd81b0abade11a987e5226084d8a4f75ddbbaad1f6c4cb9783\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:29Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.010018 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a047c4d-003e-4668-9b96-945eab34ab68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b3a6db6c2c8e6a283f7b46ef28ba7310d3cfc7054cfd8cbcc290ebd0d26dcf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-w25db\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:30Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.024058 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-ap
iserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:30Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.038887 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:30Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.050073 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:30Z is after 2025-08-24T17:21:41Z"
Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.058272 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.058317 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.058325 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.058341 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.058349 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:30Z","lastTransitionTime":"2025-12-06T15:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.066352 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa4
1ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:30Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.081367 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dc41f9e-e763-4a9a-a064-f65bc24332b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-j4rf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:30Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:30 crc 
kubenswrapper[5003]: I1206 15:32:30.160305 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.160333 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.160342 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.160355 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.160363 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:30Z","lastTransitionTime":"2025-12-06T15:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.265318 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.265375 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.265390 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.265407 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.265418 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:30Z","lastTransitionTime":"2025-12-06T15:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.373342 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.373366 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.373374 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.373386 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.373395 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:30Z","lastTransitionTime":"2025-12-06T15:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.476224 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.476516 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.476534 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.476552 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.476561 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:30Z","lastTransitionTime":"2025-12-06T15:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.579060 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.579094 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.579102 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.579116 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.579126 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:30Z","lastTransitionTime":"2025-12-06T15:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.680787 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.680827 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.680837 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.680853 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.680863 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:30Z","lastTransitionTime":"2025-12-06T15:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.708623 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.727091 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\
":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:30Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.739369 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:30Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.752117 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:30Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.770682 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dc41f9e-e763-4a9a-a064-f65bc24332b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c85
7df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-j4rf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:30Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.783048 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.783091 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.783101 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.783117 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.783126 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:30Z","lastTransitionTime":"2025-12-06T15:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.785652 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:30Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.802815 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a695d94-271c-45bc-8a89-dfdecb57ec00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7xwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:30Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.813659 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:30Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.824703 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:30Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.834288 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qcqkl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ae5ee47d26422cffca5f12a4ee1455dcd34c148b1b490d69b88280b4497da11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbhrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qcqkl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:30Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.845762 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9kdpn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46c8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9kdpn\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:30Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.856620 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:30Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.866970 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cac97fc8a54c2fbd81b0abade11a987e5226084d8a4f75ddbbaad1f6c4cb9783\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:30Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.879827 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a047c4d-003e-4668-9b96-945eab34ab68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b3a6db6c2c8e6a283f7b46ef28ba7310d3cfc7054cfd8cbcc290ebd0d26dcf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w25db\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:30Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.881323 5003 generic.go:334] "Generic (PLEG): 
container finished" podID="7dc41f9e-e763-4a9a-a064-f65bc24332b9" containerID="10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c" exitCode=0 Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.881381 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" event={"ID":"7dc41f9e-e763-4a9a-a064-f65bc24332b9","Type":"ContainerDied","Data":"10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c"} Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.884637 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.884694 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.884706 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.884724 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.884737 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:30Z","lastTransitionTime":"2025-12-06T15:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.892227 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:30Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.905052 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:30Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.916209 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:30Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.931609 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a695d94-271c-45bc-8a89-dfdecb57ec00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7xwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:30Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.949702 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:30Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.961817 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qcqkl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ae5ee47d26422cffca5f12a4ee1455dcd34c148b1b490d69b88280b4497da11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbhrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qcqkl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-06T15:32:30Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.972979 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9kdpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46c8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9kdpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:30Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.985566 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:30Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.987950 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.987970 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.987979 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.987992 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.988000 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:30Z","lastTransitionTime":"2025-12-06T15:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:30 crc kubenswrapper[5003]: I1206 15:32:30.996820 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cac97fc8a54c2fbd81b0abade11a987e5226084d8a4f75ddbbaad1f6c4cb9783\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:30Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.010658 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a047c4d-003e-4668-9b96-945eab34ab68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b3a6db6c2c8e6a283f7b46ef28ba7310d3cfc7054cfd8cbcc290ebd0d26dcf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w25db\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:31Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.030074 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f
80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:31Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.046014 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:31Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.062306 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:31Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.074064 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:31Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.086534 5003 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-additional-cni-plugins-j4rf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dc41f9e-e763-4a9a-a064-f65bc24332b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-
o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":
[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-j4rf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:31Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.089886 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.089928 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.089937 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.089952 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.089964 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:31Z","lastTransitionTime":"2025-12-06T15:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.191601 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.191633 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.191642 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.191654 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.191665 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:31Z","lastTransitionTime":"2025-12-06T15:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.293913 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.293968 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.293983 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.294003 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.294018 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:31Z","lastTransitionTime":"2025-12-06T15:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.396589 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.396645 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.396660 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.396683 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.396699 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:31Z","lastTransitionTime":"2025-12-06T15:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.500143 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.500217 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.500236 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.500260 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.500278 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:31Z","lastTransitionTime":"2025-12-06T15:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.599976 5003 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.665606 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.665932 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.665945 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.665963 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.665977 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:31Z","lastTransitionTime":"2025-12-06T15:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.711317 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:32:31 crc kubenswrapper[5003]: E1206 15:32:31.711581 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.711712 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:32:31 crc kubenswrapper[5003]: E1206 15:32:31.711847 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.711899 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:32:31 crc kubenswrapper[5003]: E1206 15:32:31.712056 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.732147 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:31Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.745712 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:31Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.772755 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.772811 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.772827 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.772846 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.772860 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:31Z","lastTransitionTime":"2025-12-06T15:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.775893 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a695d94-271c-45bc-8a89-dfdecb57ec00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7xwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:31Z 
is after 2025-08-24T17:21:41Z" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.832232 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:31Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.847524 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qcqkl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ae5ee47d26422cffca5f12a4ee1455dcd34c148b1b490d69b88280b4497da11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbhrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qcqkl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:31Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.861099 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9kdpn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46c8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9kdpn\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:31Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.874268 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:31Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.874724 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.874751 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.874759 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.874772 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.874782 5003 
setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:31Z","lastTransitionTime":"2025-12-06T15:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.883510 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cac97fc8a54c2fbd81b0abade11a987e5226084d8a4f75ddbbaad1f6c4cb9783\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:31Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.905584 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" event={"ID":"8a695d94-271c-45bc-8a89-dfdecb57ec00","Type":"ContainerStarted","Data":"c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6"} Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.907956 
5003 generic.go:334] "Generic (PLEG): container finished" podID="7dc41f9e-e763-4a9a-a064-f65bc24332b9" containerID="a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3" exitCode=0 Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.907991 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" event={"ID":"7dc41f9e-e763-4a9a-a064-f65bc24332b9","Type":"ContainerDied","Data":"a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3"} Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.907713 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a047c4d-003e-4668-9b96-945eab34ab68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b3a6db6c2c8e6a283f7b46ef28ba7310d3cfc7054cfd8cbcc290ebd0d26dcf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.
168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w25db\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:31Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.925351 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator
@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:31Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.938994 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:31Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.954826 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:31Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.967676 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:31Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.980279 5003 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-additional-cni-plugins-j4rf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dc41f9e-e763-4a9a-a064-f65bc24332b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-
o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":
[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-j4rf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:31Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.980917 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.980962 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.980975 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.980994 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.981011 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:31Z","lastTransitionTime":"2025-12-06T15:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:31 crc kubenswrapper[5003]: I1206 15:32:31.997115 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:31Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.007831 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:32Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.021267 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:32Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.036099 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:32Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.050029 5003 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-additional-cni-plugins-j4rf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dc41f9e-e763-4a9a-a064-f65bc24332b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10cf47f05fda
3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\
":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-j4rf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:32Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.062230 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container 
could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:32Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.072382 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:32Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.082849 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.082892 5003 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.082903 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.082918 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.082928 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:32Z","lastTransitionTime":"2025-12-06T15:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.091110 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a695d94-271c-45bc-8a89-dfdecb57ec00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7xwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:32Z 
is after 2025-08-24T17:21:41Z" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.101376 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:32Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.109883 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qcqkl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ae5ee47d26422cffca5f12a4ee1455dcd34c148b1b490d69b88280b4497da11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbhrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qcqkl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:32Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.121762 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9kdpn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46c8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9kdpn\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:32Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.134185 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:32Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.145745 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cac97fc8a54c2fbd81b0abade11a987e5226084d8a4f75ddbbaad1f6c4cb9783\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:32Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.156617 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a047c4d-003e-4668-9b96-945eab34ab68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b3a6db6c2c8e6a283f7b46ef28ba7310d3cfc7054cfd8cbcc290ebd0d26dcf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w25db\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:32Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.185073 5003 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.185121 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.185132 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.185145 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.185155 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:32Z","lastTransitionTime":"2025-12-06T15:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.287309 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.287352 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.287367 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.287386 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.287400 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:32Z","lastTransitionTime":"2025-12-06T15:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.390647 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.390697 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.390715 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.390736 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.390751 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:32Z","lastTransitionTime":"2025-12-06T15:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.494672 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.496082 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.496096 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.496116 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.496127 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:32Z","lastTransitionTime":"2025-12-06T15:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.598727 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.598787 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.598809 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.598830 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.598847 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:32Z","lastTransitionTime":"2025-12-06T15:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.702192 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.702230 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.702238 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.702252 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.702262 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:32Z","lastTransitionTime":"2025-12-06T15:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.804458 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.804528 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.804539 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.804555 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.804566 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:32Z","lastTransitionTime":"2025-12-06T15:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.906601 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.906656 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.906668 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.906686 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.906698 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:32Z","lastTransitionTime":"2025-12-06T15:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.914122 5003 generic.go:334] "Generic (PLEG): container finished" podID="7dc41f9e-e763-4a9a-a064-f65bc24332b9" containerID="895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed" exitCode=0 Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.914160 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" event={"ID":"7dc41f9e-e763-4a9a-a064-f65bc24332b9","Type":"ContainerDied","Data":"895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed"} Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.928546 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:32Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.939802 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cac97fc8a54c2fbd81b0abade11a987e5226084d8a4f75ddbbaad1f6c4cb9783\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:32Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.954611 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a047c4d-003e-4668-9b96-945eab34ab68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b3a6db6c2c8e6a283f7b46ef28ba7310d3cfc7054cfd8cbcc290ebd0d26dcf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-w25db\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:32Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.972092 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\
\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:32Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.984440 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:32Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:32 crc kubenswrapper[5003]: I1206 15:32:32.997986 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:32Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.010231 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.010266 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.010276 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.010289 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.010299 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:33Z","lastTransitionTime":"2025-12-06T15:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.011343 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa4
1ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:33Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.025166 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dc41f9e-e763-4a9a-a064-f65bc24332b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-j4rf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:33Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.036836 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:33Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.046913 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:33Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.072251 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a695d94-271c-45bc-8a89-dfdecb57ec00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7xwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:33Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.083981 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:33Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.094482 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qcqkl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ae5ee47d26422cffca5f12a4ee1455dcd34c148b1b490d69b88280b4497da11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbhrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qcqkl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-06T15:32:33Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.105154 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9kdpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46c8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9kdpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:33Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.113025 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.113160 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.113228 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.113300 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.113355 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:33Z","lastTransitionTime":"2025-12-06T15:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.216423 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.216516 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.216541 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.216569 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.216586 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:33Z","lastTransitionTime":"2025-12-06T15:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.318990 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.319324 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.319335 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.319348 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.319358 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:33Z","lastTransitionTime":"2025-12-06T15:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.421336 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.421377 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.421388 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.421464 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.421478 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:33Z","lastTransitionTime":"2025-12-06T15:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.523474 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.523520 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.523529 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.523541 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.523551 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:33Z","lastTransitionTime":"2025-12-06T15:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.626144 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.626205 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.626218 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.626239 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.626255 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:33Z","lastTransitionTime":"2025-12-06T15:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.711362 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.711406 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:32:33 crc kubenswrapper[5003]: E1206 15:32:33.711510 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.711559 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:32:33 crc kubenswrapper[5003]: E1206 15:32:33.711643 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:32:33 crc kubenswrapper[5003]: E1206 15:32:33.711704 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.728856 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.728899 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.728909 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.728924 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.728937 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:33Z","lastTransitionTime":"2025-12-06T15:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.831389 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.831424 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.831434 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.831448 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.831502 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:33Z","lastTransitionTime":"2025-12-06T15:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.923621 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" event={"ID":"8a695d94-271c-45bc-8a89-dfdecb57ec00","Type":"ContainerStarted","Data":"3c095b3b6f31d202807b1b8e423678c175c6f91aae8fc9318e9672edc467ab1b"} Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.923695 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.923721 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.929575 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" event={"ID":"7dc41f9e-e763-4a9a-a064-f65bc24332b9","Type":"ContainerStarted","Data":"a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0"} Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.936593 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.936646 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.936662 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.936682 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.936697 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:33Z","lastTransitionTime":"2025-12-06T15:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.945995 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:33Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.962536 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qcqkl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ae5ee47d26422cffca5f12a4ee1455dcd34c148b1b490d69b88280b4497da11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbhrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qcqkl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:33Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.963033 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.963456 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.979056 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9kdpn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46c8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9kdpn\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:33Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:33 crc kubenswrapper[5003]: I1206 15:32:33.990939 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:33Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.001196 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cac97fc8a54c2fbd81b0abade11a987e5226084d8a4f75ddbbaad1f6c4cb9783\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:33Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.013728 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a047c4d-003e-4668-9b96-945eab34ab68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b3a6db6c2c8e6a283f7b46ef28ba7310d3cfc7054cfd8cbcc290ebd0d26dcf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w25db\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:34Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.030939 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f
80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:34Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.039048 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.039089 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.039099 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.039114 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.039124 5003 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:34Z","lastTransitionTime":"2025-12-06T15:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.045160 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:34Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.047323 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.057226 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:34Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.083756 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:34Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.097843 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dc41f9e-e763-4a9a-a064-f65bc24332b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/
ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\"
:\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-j4rf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:34Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.110853 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:34Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.122642 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:34Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.143575 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a695d94-271c-45bc-8a89-dfdecb57ec00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/v
ar/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812
ead1dd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c095b3b6f31d202807b1b8e423678c175c6f91aae8fc9318e9672edc467ab1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitc
h\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7xwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:34Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.143978 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 
15:32:34.144003 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.144014 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.144030 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.144041 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:34Z","lastTransitionTime":"2025-12-06T15:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.153920 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:34Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.162272 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qcqkl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ae5ee47d26422cffca5f12a4ee1455dcd34c148b1b490d69b88280b4497da11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbhrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qcqkl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-06T15:32:34Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.173052 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9kdpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46c8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9kdpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:34Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.183836 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:34Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.192614 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cac97fc8a54c2fbd81b0abade11a987e5226084d8a4f75ddbbaad1f6c4cb9783\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:34Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.202170 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a047c4d-003e-4668-9b96-945eab34ab68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b3a6db6c2c8e6a283f7b46ef28ba7310d3cfc7054cfd8cbcc290ebd0d26dcf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-w25db\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:34Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.213357 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\
\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:34Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.223379 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:34Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.234039 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:34Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.245586 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:34Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.246637 5003 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.246681 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.246691 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.246718 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.246731 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:34Z","lastTransitionTime":"2025-12-06T15:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.259077 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dc41f9e-e763-4a9a-a064-f65bc24332b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-j4rf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:34Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.270480 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:34Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.283765 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:34Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.307244 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a695d94-271c-45bc-8a89-dfdecb57ec00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c095b3b6f31d202807b1b8e423678c175c6f91aae8fc9318e9672edc467ab1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7xwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:34Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.349358 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.349417 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.349435 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.349459 5003 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeNotReady" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.349477 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:34Z","lastTransitionTime":"2025-12-06T15:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.452068 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.452108 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.452116 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.452129 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.452138 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:34Z","lastTransitionTime":"2025-12-06T15:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.555077 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.555160 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.555179 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.555200 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.555214 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:34Z","lastTransitionTime":"2025-12-06T15:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.657703 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.657754 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.657776 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.657798 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.657812 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:34Z","lastTransitionTime":"2025-12-06T15:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.760084 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.760141 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.760151 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.760166 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.760177 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:34Z","lastTransitionTime":"2025-12-06T15:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.862660 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.862705 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.862714 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.862730 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.862739 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:34Z","lastTransitionTime":"2025-12-06T15:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.915021 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.915099 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.915111 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.915134 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.915157 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:34Z","lastTransitionTime":"2025-12-06T15:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:34 crc kubenswrapper[5003]: E1206 15:32:34.932068 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"010ec561-c8bb-454b-ab74-658add58caba\\\",\\\"systemUUID\\\":\\\"42b9e6db-6fd4-4e84-a5b7-176f28bfe2f1\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:34Z is after 
2025-08-24T17:21:41Z" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.936264 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.936305 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.936316 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.936367 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.936362 5003 generic.go:334] "Generic (PLEG): container finished" podID="7dc41f9e-e763-4a9a-a064-f65bc24332b9" containerID="a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0" exitCode=0 Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.936383 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:34Z","lastTransitionTime":"2025-12-06T15:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.936470 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" event={"ID":"7dc41f9e-e763-4a9a-a064-f65bc24332b9","Type":"ContainerDied","Data":"a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0"} Dec 06 15:32:34 crc kubenswrapper[5003]: E1206 15:32:34.950879 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeByt
es\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"010ec561-c8bb-454b-ab74-658add58caba\\\",\\\"systemUUID\\\":\\\"42b9e6db-6fd4-4e84-a5b7-176f28bfe2f1\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-06T15:32:34Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.953323 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:34Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.956182 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.956225 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.956248 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.956265 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.956277 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:34Z","lastTransitionTime":"2025-12-06T15:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.969891 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:34Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:34 crc kubenswrapper[5003]: E1206 15:32:34.970121 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"010ec561-c8bb-454b-ab74-658add58caba\\\",\\\"systemUUID\\\":\\\"42b9e6db-6fd4-4e84-a5b7-176f28bfe2f1\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:34Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.974608 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.974635 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.974644 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.974659 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.974669 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:34Z","lastTransitionTime":"2025-12-06T15:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.990866 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dc41f9e-e763-4a9a-a064-f65bc24332b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qlt
c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:32Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-j4rf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:34Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:34 crc kubenswrapper[5003]: E1206 15:32:34.990982 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"010ec561-c8bb-454b-ab74-658add58caba\\\",\\\"systemUUID\\\":\\\"42b9e6db-6fd4-4e84-a5b7-176f28bfe2f1\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:34Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.995415 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.995438 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.995446 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.995460 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:34 crc kubenswrapper[5003]: I1206 15:32:34.995469 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:34Z","lastTransitionTime":"2025-12-06T15:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.005527 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:35Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:35 crc kubenswrapper[5003]: E1206 15:32:35.007605 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"010ec561-c8bb-454b-ab74-658add58caba\\\",\\\"systemUUID\\\":\\\"42b9e6db-6fd4-4e84-a5b7-176f28bfe2f1\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:35Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:35 crc kubenswrapper[5003]: E1206 15:32:35.007750 5003 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.009736 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.009777 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.009790 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.009808 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.009819 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:35Z","lastTransitionTime":"2025-12-06T15:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.016558 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:35Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.028352 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:35Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.038337 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:35Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.056862 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a695d94-271c-45bc-8a89-dfdecb57ec00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c095b3b6f31d202807b1b8e423678c175c6f91aae8fc9318e9672edc467ab1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7xwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:35Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.070911 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9kdpn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46c8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9kdpn\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:35Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.083301 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:35Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.096743 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qcqkl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ae5ee47d26422cffca5f12a4ee1455dcd34c148b1b490d69b88280b4497da11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbhrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qcqkl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:35Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.109503 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a047c4d-003e-4668-9b96-945eab34ab68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b3a6db6c2c8e6a283f7b46ef28ba7310d3cfc7054cfd8cbcc290ebd0d26dcf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w25db\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:35Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.111938 5003 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.111975 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.111989 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.112007 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.112021 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:35Z","lastTransitionTime":"2025-12-06T15:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.125189 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:35Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.134406 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cac97fc8a54c2fbd81b0abade11a987e5226084d8a4f75ddbbaad1f6c4cb9783\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:35Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.214881 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.215445 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.215460 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.215480 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.215528 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:35Z","lastTransitionTime":"2025-12-06T15:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.318209 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.318285 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.318307 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.318336 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.318357 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:35Z","lastTransitionTime":"2025-12-06T15:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.420783 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.420845 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.420862 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.420886 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.420902 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:35Z","lastTransitionTime":"2025-12-06T15:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.524790 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.524866 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.524895 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.524923 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.524943 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:35Z","lastTransitionTime":"2025-12-06T15:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.628260 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.628320 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.628342 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.628372 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.628424 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:35Z","lastTransitionTime":"2025-12-06T15:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.711719 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.711775 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.711826 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:32:35 crc kubenswrapper[5003]: E1206 15:32:35.711962 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:32:35 crc kubenswrapper[5003]: E1206 15:32:35.712122 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:32:35 crc kubenswrapper[5003]: E1206 15:32:35.712285 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.731321 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.731402 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.731426 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.731454 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.731477 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:35Z","lastTransitionTime":"2025-12-06T15:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.834390 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.834441 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.834457 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.834477 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.834509 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:35Z","lastTransitionTime":"2025-12-06T15:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.937863 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.937914 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.937933 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.937957 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.937974 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:35Z","lastTransitionTime":"2025-12-06T15:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.949836 5003 generic.go:334] "Generic (PLEG): container finished" podID="7dc41f9e-e763-4a9a-a064-f65bc24332b9" containerID="f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587" exitCode=0 Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.950106 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" event={"ID":"7dc41f9e-e763-4a9a-a064-f65bc24332b9","Type":"ContainerDied","Data":"f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587"} Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.974607 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:35Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:35 crc kubenswrapper[5003]: I1206 15:32:35.997467 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qcqkl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ae5ee47d26422cffca5f12a4ee1455dcd34c148b1b490d69b88280b4497da11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbhrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qcqkl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-06T15:32:35Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.016587 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9kdpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46c8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9kdpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:36Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.040531 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:36Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.042173 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.042228 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.042247 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.042271 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.042290 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:36Z","lastTransitionTime":"2025-12-06T15:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.053879 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cac97fc8a54c2fbd81b0abade11a987e5226084d8a4f75ddbbaad1f6c4cb9783\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:36Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.066987 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a047c4d-003e-4668-9b96-945eab34ab68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b3a6db6c2c8e6a283f7b46ef28ba7310d3cfc7054cfd8cbcc290ebd0d26dcf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w25db\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:36Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.082128 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f
80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:36Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.094503 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:36Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.110564 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:36Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.129798 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:36Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.148138 5003 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.148180 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.148192 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.148211 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.148224 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:36Z","lastTransitionTime":"2025-12-06T15:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.151835 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dc41f9e-e763-4a9a-a064-f65bc24332b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-j4rf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:36Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.171228 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when 
the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:36Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.188416 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:36Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.216624 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a695d94-271c-45bc-8a89-dfdecb57ec00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c095b3b6f31d202807b1b8e423678c175c6f91aae8fc9318e9672edc467ab1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7xwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:36Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.250986 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.251212 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.251226 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.251245 5003 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeNotReady" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.251259 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:36Z","lastTransitionTime":"2025-12-06T15:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.354503 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.354552 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.354562 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.354577 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.354586 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:36Z","lastTransitionTime":"2025-12-06T15:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.456238 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.456271 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.456283 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.456298 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.456307 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:36Z","lastTransitionTime":"2025-12-06T15:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.559314 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.559358 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.559370 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.559386 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.559398 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:36Z","lastTransitionTime":"2025-12-06T15:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.662202 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.662247 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.662262 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.662280 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.662291 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:36Z","lastTransitionTime":"2025-12-06T15:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.765073 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.765150 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.765174 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.765205 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.765229 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:36Z","lastTransitionTime":"2025-12-06T15:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.867813 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.867887 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.867911 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.867955 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.867977 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:36Z","lastTransitionTime":"2025-12-06T15:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.959703 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" event={"ID":"7dc41f9e-e763-4a9a-a064-f65bc24332b9","Type":"ContainerStarted","Data":"467dcc8ed3936c6fb24f6c6c3e42eccf3597ce6920d12b253946a123ee22faf8"} Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.971543 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.971603 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.971621 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.971643 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.971661 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:36Z","lastTransitionTime":"2025-12-06T15:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.980020 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:36Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:36 crc kubenswrapper[5003]: I1206 15:32:36.994989 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:36Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.024674 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a695d94-271c-45bc-8a89-dfdecb57ec00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c095b3b6f31d202807b1b8e423678c175c6f91a
ae8fc9318e9672edc467ab1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7xwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:37Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.039528 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:37Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.053301 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qcqkl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ae5ee47d26422cffca5f12a4ee1455dcd34c148b1b490d69b88280b4497da11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbhrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qcqkl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-06T15:32:37Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.071124 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9kdpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46c8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9kdpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:37Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.073830 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.073865 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.073876 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.073893 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.073906 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:37Z","lastTransitionTime":"2025-12-06T15:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.087525 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:37Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.099674 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cac97fc8a54c2fbd81b0abade11a987e5226084d8a4f75ddbbaad1f6c4cb9783\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:37Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.111681 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a047c4d-003e-4668-9b96-945eab34ab68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b3a6db6c2c8e6a283f7b46ef28ba7310d3cfc7054cfd8cbcc290ebd0d26dcf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-w25db\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:37Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.128370 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\
\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:37Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.141405 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:37Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.164110 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:37Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.178907 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.178951 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.178960 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.178974 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.178985 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:37Z","lastTransitionTime":"2025-12-06T15:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.183468 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa4
1ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:37Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.201393 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dc41f9e-e763-4a9a-a064-f65bc24332b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://467dcc8ed3936c6fb24f6c6c3e42eccf3597ce6920d12b253946a123ee22faf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\
\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/servicea
ccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-j4rf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:37Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.282040 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.282079 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.282091 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.282131 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.282143 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:37Z","lastTransitionTime":"2025-12-06T15:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.385136 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.385184 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.385195 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.385212 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.385224 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:37Z","lastTransitionTime":"2025-12-06T15:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.489308 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.489373 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.489398 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.489426 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.489450 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:37Z","lastTransitionTime":"2025-12-06T15:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.560741 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.560848 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.560877 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.560896 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.560914 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:32:37 crc kubenswrapper[5003]: E1206 15:32:37.561003 5003 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object 
"openshift-network-console"/"networking-console-plugin" not registered Dec 06 15:32:37 crc kubenswrapper[5003]: E1206 15:32:37.561026 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 06 15:32:37 crc kubenswrapper[5003]: E1206 15:32:37.561037 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 06 15:32:37 crc kubenswrapper[5003]: E1206 15:32:37.561047 5003 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 15:32:37 crc kubenswrapper[5003]: E1206 15:32:37.561066 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-06 15:32:53.561048887 +0000 UTC m=+52.094403268 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 06 15:32:37 crc kubenswrapper[5003]: E1206 15:32:37.561083 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-06 15:32:53.561076068 +0000 UTC m=+52.094430449 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 15:32:37 crc kubenswrapper[5003]: E1206 15:32:37.561091 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 06 15:32:37 crc kubenswrapper[5003]: E1206 15:32:37.561119 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 06 15:32:37 crc kubenswrapper[5003]: E1206 15:32:37.561133 5003 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 15:32:37 crc kubenswrapper[5003]: E1206 15:32:37.561129 5003 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 06 15:32:37 crc kubenswrapper[5003]: E1206 15:32:37.561176 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-06 15:32:53.56115939 +0000 UTC m=+52.094513781 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 15:32:37 crc kubenswrapper[5003]: E1206 15:32:37.561262 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-06 15:32:53.561225292 +0000 UTC m=+52.094579723 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 06 15:32:37 crc kubenswrapper[5003]: E1206 15:32:37.561586 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:32:53.561554651 +0000 UTC m=+52.094909082 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.591923 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.591960 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.591968 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.591980 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.591989 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:37Z","lastTransitionTime":"2025-12-06T15:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.695025 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.695068 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.695078 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.695094 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.695105 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:37Z","lastTransitionTime":"2025-12-06T15:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.711248 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.711287 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:32:37 crc kubenswrapper[5003]: E1206 15:32:37.711390 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.711414 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:32:37 crc kubenswrapper[5003]: E1206 15:32:37.711599 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:32:37 crc kubenswrapper[5003]: E1206 15:32:37.712382 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.797445 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.797484 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.797505 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.797529 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.797538 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:37Z","lastTransitionTime":"2025-12-06T15:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.901264 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.901357 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.901406 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.901430 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.901447 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:37Z","lastTransitionTime":"2025-12-06T15:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.966007 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7xwd_8a695d94-271c-45bc-8a89-dfdecb57ec00/ovnkube-controller/0.log" Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.969684 5003 generic.go:334] "Generic (PLEG): container finished" podID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerID="3c095b3b6f31d202807b1b8e423678c175c6f91aae8fc9318e9672edc467ab1b" exitCode=1 Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.969844 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" event={"ID":"8a695d94-271c-45bc-8a89-dfdecb57ec00","Type":"ContainerDied","Data":"3c095b3b6f31d202807b1b8e423678c175c6f91aae8fc9318e9672edc467ab1b"} Dec 06 15:32:37 crc kubenswrapper[5003]: I1206 15:32:37.971250 5003 scope.go:117] "RemoveContainer" containerID="3c095b3b6f31d202807b1b8e423678c175c6f91aae8fc9318e9672edc467ab1b" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.005974 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a695d94-271c-45bc-8a89-dfdecb57ec00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c095b3b6f31d202807b1b8e423678c175c6f91a
ae8fc9318e9672edc467ab1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c095b3b6f31d202807b1b8e423678c175c6f91aae8fc9318e9672edc467ab1b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-06T15:32:37Z\\\",\\\"message\\\":\\\"go:140\\\\nI1206 15:32:37.183443 6276 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1206 15:32:37.183688 6276 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1206 15:32:37.184234 6276 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1206 15:32:37.184280 6276 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1206 15:32:37.184342 6276 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1206 15:32:37.184401 6276 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1206 15:32:37.184438 6276 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1206 15:32:37.184519 6276 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1206 15:32:37.184606 6276 factory.go:656] Stopping watch factory\\\\nI1206 15:32:37.184653 6276 ovnkube.go:599] Stopped ovnkube\\\\nI1206 15:32:37.184708 6276 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1206 15:32:37.184752 6276 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1206 15:32:37.184794 6276 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1206 15:32:37.184798 6276 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1206 15:32:37.184813 6276 handler.go:208] Removed *v1.Node 
eve\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7xwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:38Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.006629 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.006670 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.006682 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.006701 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.006717 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:38Z","lastTransitionTime":"2025-12-06T15:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.023380 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:38Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.039520 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:38Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.051605 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qcqkl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ae5ee47d26422cffca5f12a4ee1455dcd34c148b1b490d69b88280b4497da11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbhrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qcqkl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:38Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.069386 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9kdpn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46c8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9kdpn\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:38Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.083902 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:38Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.102114 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cac97fc8a54c2fbd81b0abade11a987e5226084d8a4f75ddbbaad1f6c4cb9783\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:38Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.115476 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.115787 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.115923 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.116030 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.116140 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:38Z","lastTransitionTime":"2025-12-06T15:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.126895 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a047c4d-003e-4668-9b96-945eab34ab68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b3a6db6c2c8e6a283f7b46ef28ba7310d3cfc7054cfd8cbcc290ebd0d26dcf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w25db\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:38Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.145610 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:38Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.164628 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:38Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.182968 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:38Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.197164 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:38Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.212462 5003 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-additional-cni-plugins-j4rf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dc41f9e-e763-4a9a-a064-f65bc24332b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://467dcc8ed3936c6fb24f6c6c3e42eccf3597ce6920d12b253946a123ee22faf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\
\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-j4rf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:38Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.218499 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.218591 5003 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.218647 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.218703 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.218755 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:38Z","lastTransitionTime":"2025-12-06T15:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.231560 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kub
ernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:38Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.320958 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.320984 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.320994 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.321006 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 
15:32:38.321015 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:38Z","lastTransitionTime":"2025-12-06T15:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.431130 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.431205 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.431224 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.431250 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.431268 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:38Z","lastTransitionTime":"2025-12-06T15:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.533470 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.533548 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.533556 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.533570 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.533579 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:38Z","lastTransitionTime":"2025-12-06T15:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.635714 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.635762 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.635775 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.635792 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.635804 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:38Z","lastTransitionTime":"2025-12-06T15:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.738376 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.738421 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.738431 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.738446 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.738457 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:38Z","lastTransitionTime":"2025-12-06T15:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.841879 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.841932 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.841946 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.841967 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.841983 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:38Z","lastTransitionTime":"2025-12-06T15:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.944327 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.944371 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.944385 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.944403 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.944414 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:38Z","lastTransitionTime":"2025-12-06T15:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.973998 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7xwd_8a695d94-271c-45bc-8a89-dfdecb57ec00/ovnkube-controller/0.log" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.976452 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" event={"ID":"8a695d94-271c-45bc-8a89-dfdecb57ec00","Type":"ContainerStarted","Data":"4c6884100d6c3b4944019e7d9e7a2bdaf2013d697ce3980863742a60a7b52d7f"} Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.976938 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:38 crc kubenswrapper[5003]: I1206 15:32:38.989619 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:38Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.002563 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:39Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.017024 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:39Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.032794 5003 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-additional-cni-plugins-j4rf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dc41f9e-e763-4a9a-a064-f65bc24332b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://467dcc8ed3936c6fb24f6c6c3e42eccf3597ce6920d12b253946a123ee22faf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\
\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-j4rf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:39Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.046765 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.046815 5003 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.046828 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.046848 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.046866 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:39Z","lastTransitionTime":"2025-12-06T15:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.059302 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kub
ernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:39Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.083464 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a695d94-271c-45bc-8a89-dfdecb57ec00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c6884100d6c3b4944019e7d9e7a2bdaf2013d697ce3980863742a60a7b52d7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c095b3b6f31d202807b1b8e423678c175c6f91aae8fc9318e9672edc467ab1b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-06T15:32:37Z\\\",\\\"message\\\":\\\"go:140\\\\nI1206 15:32:37.183443 6276 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1206 15:32:37.183688 6276 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1206 15:32:37.184234 6276 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1206 15:32:37.184280 6276 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1206 15:32:37.184342 6276 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1206 15:32:37.184401 6276 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1206 15:32:37.184438 6276 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1206 15:32:37.184519 6276 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1206 15:32:37.184606 6276 factory.go:656] Stopping watch factory\\\\nI1206 15:32:37.184653 6276 ovnkube.go:599] Stopped ovnkube\\\\nI1206 15:32:37.184708 6276 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1206 15:32:37.184752 6276 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1206 15:32:37.184794 6276 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1206 15:32:37.184798 6276 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1206 15:32:37.184813 6276 handler.go:208] Removed *v1.Node 
eve\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"co
ntainerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7xwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:39Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.094418 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:39Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.104389 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:39Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.113084 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qcqkl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ae5ee47d26422cffca5f12a4ee1455dcd34c148b1b490d69b88280b4497da11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbhrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qcqkl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:39Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.123516 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9kdpn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46c8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9kdpn\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:39Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.133439 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:39Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.146233 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cac97fc8a54c2fbd81b0abade11a987e5226084d8a4f75ddbbaad1f6c4cb9783\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:39Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.149070 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.149106 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.149115 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.149129 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.149138 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:39Z","lastTransitionTime":"2025-12-06T15:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.159611 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a047c4d-003e-4668-9b96-945eab34ab68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b3a6db6c2c8e6a283f7b46ef28ba7310d3cfc7054cfd8cbcc290ebd0d26dcf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w25db\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:39Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.172816 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:39Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.251751 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.251801 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.251843 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.251862 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:39 crc 
kubenswrapper[5003]: I1206 15:32:39.251873 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:39Z","lastTransitionTime":"2025-12-06T15:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.354343 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.354402 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.354420 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.354446 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.354463 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:39Z","lastTransitionTime":"2025-12-06T15:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.458067 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.458145 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.458167 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.458189 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.458207 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:39Z","lastTransitionTime":"2025-12-06T15:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.561279 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.561349 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.561366 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.561389 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.561405 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:39Z","lastTransitionTime":"2025-12-06T15:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.610864 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7788j"] Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.611475 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7788j" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.615482 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.615824 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.637616 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:39Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.650377 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:39Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.664292 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dc41f9e-e763-4a9a-a064-f65bc24332b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://467dcc8ed3936c6fb24f6c6c3e42eccf3597ce6920d12b253946a123ee22faf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-j4rf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:39Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.667761 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.667873 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:39 crc 
kubenswrapper[5003]: I1206 15:32:39.667890 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.667921 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.667937 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:39Z","lastTransitionTime":"2025-12-06T15:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.684392 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1-env-overrides\") pod \"ovnkube-control-plane-749d76644c-7788j\" (UID: \"f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7788j" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.684885 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-7788j\" (UID: \"f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7788j" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.684966 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-7788j\" (UID: \"f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7788j" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.685068 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twj7c\" (UniqueName: \"kubernetes.io/projected/f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1-kube-api-access-twj7c\") pod \"ovnkube-control-plane-749d76644c-7788j\" (UID: \"f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7788j" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.688211 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:39Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.711666 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:39Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.711770 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.711816 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.711718 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:32:39 crc kubenswrapper[5003]: E1206 15:32:39.711994 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:32:39 crc kubenswrapper[5003]: E1206 15:32:39.712102 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:32:39 crc kubenswrapper[5003]: E1206 15:32:39.712173 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.738479 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:39Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.753898 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:39Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.772145 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.772190 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.772202 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.772218 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.772229 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:39Z","lastTransitionTime":"2025-12-06T15:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.776693 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a695d94-271c-45bc-8a89-dfdecb57ec00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c6884100d6c3b4944019e7d9e7a2bdaf2013d697ce3980863742a60a7b52d7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c095b3b6f31d202807b1b8e423678c175c6f91aae8fc9318e9672edc467ab1b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-06T15:32:37Z\\\",\\\"message\\\":\\\"go:140\\\\nI1206 15:32:37.183443 6276 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1206 15:32:37.183688 6276 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1206 15:32:37.184234 6276 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1206 15:32:37.184280 6276 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1206 15:32:37.184342 6276 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1206 15:32:37.184401 6276 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1206 15:32:37.184438 6276 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1206 15:32:37.184519 6276 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1206 15:32:37.184606 6276 factory.go:656] Stopping watch factory\\\\nI1206 15:32:37.184653 6276 ovnkube.go:599] Stopped ovnkube\\\\nI1206 15:32:37.184708 6276 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1206 15:32:37.184752 6276 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1206 15:32:37.184794 6276 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1206 15:32:37.184798 6276 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1206 15:32:37.184813 6276 handler.go:208] Removed *v1.Node 
eve\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"co
ntainerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7xwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:39Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.786185 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1-env-overrides\") pod \"ovnkube-control-plane-749d76644c-7788j\" (UID: \"f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7788j" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.786247 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-7788j\" (UID: \"f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7788j" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.786270 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-7788j\" (UID: \"f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7788j" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.786294 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twj7c\" (UniqueName: \"kubernetes.io/projected/f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1-kube-api-access-twj7c\") pod \"ovnkube-control-plane-749d76644c-7788j\" (UID: \"f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7788j" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.786941 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: 
\"kubernetes.io/configmap/f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1-env-overrides\") pod \"ovnkube-control-plane-749d76644c-7788j\" (UID: \"f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7788j" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.787445 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-7788j\" (UID: \"f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7788j" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.797056 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9kdpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/mult
us.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46c8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9kdpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:39Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.797326 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-7788j\" (UID: \"f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7788j" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.808741 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twj7c\" (UniqueName: \"kubernetes.io/projected/f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1-kube-api-access-twj7c\") pod \"ovnkube-control-plane-749d76644c-7788j\" (UID: \"f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7788j" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.813560 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:39Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.827653 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qcqkl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ae5ee47d26422cffca5f12a4ee1455dcd34c148b1b490d69b88280b4497da11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbhrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qcqkl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-06T15:32:39Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.842119 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a047c4d-003e-4668-9b96-945eab34ab68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b3a6db6c2c8e6a283f7b46ef28ba7310d3cfc7054cfd8cbcc290ebd0d26dcf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w25db\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:39Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.857901 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7788j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twj7c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twj7c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7788j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed 
to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:39Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.872021 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:39Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.875550 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.875602 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.875622 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.875646 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.875663 5003 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:39Z","lastTransitionTime":"2025-12-06T15:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.884303 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cac97fc8a54c2fbd81b0abade11a987e5226084d8a4f75ddbbaad1f6c4cb9783\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:39Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.935673 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7788j" Dec 06 15:32:39 crc kubenswrapper[5003]: W1206 15:32:39.956175 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf27a64b8_ecd1_4201_a3bd_a4f5a0aa05a1.slice/crio-d580b554973085e6976fc2cc32f8cd661eb8551363d7c8a279a4307e90b9b496 WatchSource:0}: Error finding container d580b554973085e6976fc2cc32f8cd661eb8551363d7c8a279a4307e90b9b496: Status 404 returned error can't find the container with id d580b554973085e6976fc2cc32f8cd661eb8551363d7c8a279a4307e90b9b496 Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.978598 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.978656 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.978673 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.978697 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.978713 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:39Z","lastTransitionTime":"2025-12-06T15:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.982849 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7xwd_8a695d94-271c-45bc-8a89-dfdecb57ec00/ovnkube-controller/1.log" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.984057 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7xwd_8a695d94-271c-45bc-8a89-dfdecb57ec00/ovnkube-controller/0.log" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.988138 5003 generic.go:334] "Generic (PLEG): container finished" podID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerID="4c6884100d6c3b4944019e7d9e7a2bdaf2013d697ce3980863742a60a7b52d7f" exitCode=1 Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.988185 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" event={"ID":"8a695d94-271c-45bc-8a89-dfdecb57ec00","Type":"ContainerDied","Data":"4c6884100d6c3b4944019e7d9e7a2bdaf2013d697ce3980863742a60a7b52d7f"} Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.988236 5003 scope.go:117] "RemoveContainer" containerID="3c095b3b6f31d202807b1b8e423678c175c6f91aae8fc9318e9672edc467ab1b" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.989272 5003 scope.go:117] "RemoveContainer" containerID="4c6884100d6c3b4944019e7d9e7a2bdaf2013d697ce3980863742a60a7b52d7f" Dec 06 15:32:39 crc kubenswrapper[5003]: E1206 15:32:39.989562 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-p7xwd_openshift-ovn-kubernetes(8a695d94-271c-45bc-8a89-dfdecb57ec00)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" Dec 06 15:32:39 crc kubenswrapper[5003]: I1206 15:32:39.989680 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7788j" event={"ID":"f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1","Type":"ContainerStarted","Data":"d580b554973085e6976fc2cc32f8cd661eb8551363d7c8a279a4307e90b9b496"} Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.001356 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:40Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.013549 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qcqkl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ae5ee47d26422cffca5f12a4ee1455dcd34c148b1b490d69b88280b4497da11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbhrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qcqkl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:40Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.027918 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9kdpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}
,{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46c8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9kdpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:40Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.046034 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:40Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.056227 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cac97fc8a54c2fbd81b0abade11a987e5226084d8a4f75ddbbaad1f6c4cb9783\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:40Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.066554 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a047c4d-003e-4668-9b96-945eab34ab68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b3a6db6c2c8e6a283f7b46ef28ba7310d3cfc7054cfd8cbcc290ebd0d26dcf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-w25db\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:40Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.076458 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7788j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twj7c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twj7c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:3
9Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7788j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:40Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.080412 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.080455 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.080468 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.080859 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.080882 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:40Z","lastTransitionTime":"2025-12-06T15:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.102057 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dc41f9e-e763-4a9a-a064-f65bc24332b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://467dcc8ed3936c6fb24f6c6c3e42eccf3597ce6920d12b253946a123ee22faf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-j4rf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:40Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.116046 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:40Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.128756 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:40Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.141660 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:40Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.153684 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:40Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.164660 5003 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:40Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.179586 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:40Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.183052 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.183082 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.183092 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.183108 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.183122 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:40Z","lastTransitionTime":"2025-12-06T15:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.198472 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a695d94-271c-45bc-8a89-dfdecb57ec00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c6884100d6c3b4944019e7d9e7a2bdaf2013d697ce3980863742a60a7b52d7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c095b3b6f31d202807b1b8e423678c175c6f91aae8fc9318e9672edc467ab1b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-06T15:32:37Z\\\",\\\"message\\\":\\\"go:140\\\\nI1206 15:32:37.183443 6276 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1206 15:32:37.183688 6276 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1206 15:32:37.184234 6276 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1206 15:32:37.184280 6276 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1206 15:32:37.184342 6276 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1206 15:32:37.184401 6276 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1206 15:32:37.184438 6276 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1206 15:32:37.184519 6276 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1206 15:32:37.184606 6276 factory.go:656] Stopping watch factory\\\\nI1206 15:32:37.184653 6276 ovnkube.go:599] Stopped ovnkube\\\\nI1206 15:32:37.184708 6276 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1206 15:32:37.184752 6276 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1206 15:32:37.184794 6276 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1206 15:32:37.184798 6276 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1206 15:32:37.184813 6276 handler.go:208] Removed *v1.Node 
eve\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c6884100d6c3b4944019e7d9e7a2bdaf2013d697ce3980863742a60a7b52d7f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-06T15:32:39Z\\\",\\\"message\\\":\\\"kipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-marketplace/redhat-marketplace_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/redhat-marketplace\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.140\\\\\\\", Port:50051, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1206 15:32:39.139098 6445 ovnkube.go:599] Stopped ovnkube\\\\nI1206 15:32:39.139178 6445 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1206 15:32:39.139236 6445 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to 
s\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d209
9482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7xwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:40Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.286133 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.286196 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.286213 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.286237 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.286254 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:40Z","lastTransitionTime":"2025-12-06T15:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.388027 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.388067 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.388075 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.388089 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.388098 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:40Z","lastTransitionTime":"2025-12-06T15:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.490722 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.490796 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.490846 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.490875 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.490894 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:40Z","lastTransitionTime":"2025-12-06T15:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.593331 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.593370 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.593381 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.593398 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.593420 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:40Z","lastTransitionTime":"2025-12-06T15:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.697927 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.697958 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.697967 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.697980 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.697990 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:40Z","lastTransitionTime":"2025-12-06T15:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.801142 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.801217 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.801241 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.801271 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.801295 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:40Z","lastTransitionTime":"2025-12-06T15:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.903582 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.903635 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.903652 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.903675 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.903691 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:40Z","lastTransitionTime":"2025-12-06T15:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.997292 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7788j" event={"ID":"f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1","Type":"ContainerStarted","Data":"f47c00cc6d341dbe1a5f3495f04fe4e695370952f6d6b209a5aaff1ace9d17e7"} Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.997345 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7788j" event={"ID":"f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1","Type":"ContainerStarted","Data":"4bf147399c10dd7e654abd5213c4d90e8aa9feca7f8c6032c16576a25aeace68"} Dec 06 15:32:40 crc kubenswrapper[5003]: I1206 15:32:40.999717 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7xwd_8a695d94-271c-45bc-8a89-dfdecb57ec00/ovnkube-controller/1.log" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.003742 5003 scope.go:117] "RemoveContainer" containerID="4c6884100d6c3b4944019e7d9e7a2bdaf2013d697ce3980863742a60a7b52d7f" Dec 06 15:32:41 crc kubenswrapper[5003]: E1206 15:32:41.004088 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-p7xwd_openshift-ovn-kubernetes(8a695d94-271c-45bc-8a89-dfdecb57ec00)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.005760 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.005880 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.005982 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.006072 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.006151 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:41Z","lastTransitionTime":"2025-12-06T15:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.016244 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a047c4d-003e-4668-9b96-945eab34ab68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b3a6db6c2c8e6a283f7b46ef28ba7310d3cfc7054cfd8cbcc290ebd0d26dcf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w25db\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.032722 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7788j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bf147399c10dd7e654abd5213c4d90e8aa9feca7f8c6032c16576a25aeace68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twj7c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47c00cc6d341dbe1a5f3495f04fe4e695370952f6d6b209a5aaff1ace9d17e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twj7c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:
39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7788j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.045232 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.054801 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cac97fc8a54c2fbd81b0abade11a987e5226084d8a4f75ddbbaad1f6c4cb9783\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.063857 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-jmzd9"] Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.064388 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:32:41 crc kubenswrapper[5003]: E1206 15:32:41.064455 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.070079 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.081721 5003 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.095610 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dc41f9e-e763-4a9a-a064-f65bc24332b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://467dcc8ed3936c6fb24f6c6c3e42eccf3597ce6920d12b253946a123ee22faf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-j4rf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.108209 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.108245 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:41 crc 
kubenswrapper[5003]: I1206 15:32:41.108257 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.108275 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.108286 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:41Z","lastTransitionTime":"2025-12-06T15:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.111880 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}
]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 
cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.127378 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.141737 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.154617 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.182471 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a695d94-271c-45bc-8a89-dfdecb57ec00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c6884100d6c3b4944019e7d9e7a2bdaf2013d697ce3980863742a60a7b52d7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c095b3b6f31d202807b1b8e423678c175c6f91aae8fc9318e9672edc467ab1b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-06T15:32:37Z\\\",\\\"message\\\":\\\"go:140\\\\nI1206 15:32:37.183443 6276 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1206 15:32:37.183688 6276 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1206 15:32:37.184234 6276 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1206 15:32:37.184280 6276 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1206 15:32:37.184342 6276 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1206 15:32:37.184401 6276 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1206 15:32:37.184438 6276 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1206 15:32:37.184519 6276 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1206 15:32:37.184606 6276 factory.go:656] Stopping watch factory\\\\nI1206 15:32:37.184653 6276 ovnkube.go:599] Stopped ovnkube\\\\nI1206 15:32:37.184708 6276 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1206 15:32:37.184752 6276 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1206 15:32:37.184794 6276 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1206 15:32:37.184798 6276 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1206 15:32:37.184813 6276 handler.go:208] Removed *v1.Node eve\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c6884100d6c3b4944019e7d9e7a2bdaf2013d697ce3980863742a60a7b52d7f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-06T15:32:39Z\\\",\\\"message\\\":\\\"kipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, 
Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-marketplace/redhat-marketplace_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/redhat-marketplace\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.140\\\\\\\", Port:50051, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1206 15:32:39.139098 6445 ovnkube.go:599] Stopped ovnkube\\\\nI1206 15:32:39.139178 6445 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1206 15:32:39.139236 6445 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to s\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7
e0ee3cecf113c37d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7xwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.198603 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9kdpn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46c8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9kdpn\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.203641 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9s4lw\" (UniqueName: \"kubernetes.io/projected/9fa121e1-7f2f-4912-945f-86cb199c3014-kube-api-access-9s4lw\") pod \"network-metrics-daemon-jmzd9\" (UID: \"9fa121e1-7f2f-4912-945f-86cb199c3014\") " pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.203682 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9fa121e1-7f2f-4912-945f-86cb199c3014-metrics-certs\") pod \"network-metrics-daemon-jmzd9\" (UID: \"9fa121e1-7f2f-4912-945f-86cb199c3014\") " pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.210967 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.210996 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.211004 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.211017 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.211026 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:41Z","lastTransitionTime":"2025-12-06T15:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.216253 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.227731 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qcqkl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ae5ee47d26422cffca5f12a4ee1455dcd34c148b1b490d69b88280b4497da11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbhrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qcqkl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.240928 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.257916 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qcqkl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ae5ee47d26422cffca5f12a4ee1455dcd34c148b1b490d69b88280b4497da11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbhrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qcqkl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.271980 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9kdpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}
,{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46c8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9kdpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.284267 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-jmzd9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fa121e1-7f2f-4912-945f-86cb199c3014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9s4lw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9s4lw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:41Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-jmzd9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.296585 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.304680 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9fa121e1-7f2f-4912-945f-86cb199c3014-metrics-certs\") pod \"network-metrics-daemon-jmzd9\" (UID: \"9fa121e1-7f2f-4912-945f-86cb199c3014\") " pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.304751 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9s4lw\" (UniqueName: \"kubernetes.io/projected/9fa121e1-7f2f-4912-945f-86cb199c3014-kube-api-access-9s4lw\") pod \"network-metrics-daemon-jmzd9\" (UID: \"9fa121e1-7f2f-4912-945f-86cb199c3014\") " pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:32:41 crc kubenswrapper[5003]: E1206 15:32:41.304868 5003 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 06 15:32:41 crc kubenswrapper[5003]: E1206 15:32:41.304930 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9fa121e1-7f2f-4912-945f-86cb199c3014-metrics-certs podName:9fa121e1-7f2f-4912-945f-86cb199c3014 nodeName:}" failed. No retries permitted until 2025-12-06 15:32:41.804912837 +0000 UTC m=+40.338267238 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9fa121e1-7f2f-4912-945f-86cb199c3014-metrics-certs") pod "network-metrics-daemon-jmzd9" (UID: "9fa121e1-7f2f-4912-945f-86cb199c3014") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.307244 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cac97fc8a54c2fbd81b0abade11a987e5226084d8a4f75ddbbaad1f6c4cb9783\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.342181 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.342205 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.342212 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 
15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.342225 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.342264 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:41Z","lastTransitionTime":"2025-12-06T15:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.345281 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9s4lw\" (UniqueName: \"kubernetes.io/projected/9fa121e1-7f2f-4912-945f-86cb199c3014-kube-api-access-9s4lw\") pod \"network-metrics-daemon-jmzd9\" (UID: \"9fa121e1-7f2f-4912-945f-86cb199c3014\") " pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.354367 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a047c4d-003e-4668-9b96-945eab34ab68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b3a6db6c2c8e6a283f7b46ef28ba7310d3cfc7054cfd8cbcc290ebd0d26dcf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e9
5ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w25db\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.367068 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7788j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bf147399c10dd7e654abd5213c4d90e8aa9feca7f8c6032c16576a25aeace68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twj7c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47c00cc6d341dbe1a5f3495f04fe4e695370952f6d6b209a5aaff1ace9d17e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b1
54edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twj7c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7788j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.380276 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\
\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.392637 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.410878 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.432318 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dc41f9e-e763-4a9a-a064-f65bc24332b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://467dcc8ed3936c6fb24f6c6c3e42eccf3597ce6920d12b253946a123ee22faf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-j4rf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.445117 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.445176 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:41 crc 
kubenswrapper[5003]: I1206 15:32:41.445200 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.445234 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.445257 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:41Z","lastTransitionTime":"2025-12-06T15:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.453928 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}
]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 
cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.468574 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.500535 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a695d94-271c-45bc-8a89-dfdecb57ec00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c6884100d6c3b4944019e7d9e7a2bdaf2013d69
7ce3980863742a60a7b52d7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c6884100d6c3b4944019e7d9e7a2bdaf2013d697ce3980863742a60a7b52d7f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-06T15:32:39Z\\\",\\\"message\\\":\\\"kipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-marketplace/redhat-marketplace_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/redhat-marketplace\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.140\\\\\\\", Port:50051, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1206 15:32:39.139098 6445 ovnkube.go:599] Stopped ovnkube\\\\nI1206 15:32:39.139178 6445 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1206 15:32:39.139236 6445 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to s\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-p7xwd_openshift-ovn-kubernetes(8a695d94-271c-45bc-8a89-dfdecb57ec00)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7xwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.523819 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.547681 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.547741 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.547760 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.547788 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.547805 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:41Z","lastTransitionTime":"2025-12-06T15:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.655720 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.655795 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.655818 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.655845 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.655865 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:41Z","lastTransitionTime":"2025-12-06T15:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.711825 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.711850 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.711868 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:32:41 crc kubenswrapper[5003]: E1206 15:32:41.712456 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:32:41 crc kubenswrapper[5003]: E1206 15:32:41.712823 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:32:41 crc kubenswrapper[5003]: E1206 15:32:41.712707 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.737005 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.759212 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.759269 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.759289 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.759313 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.759330 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:41Z","lastTransitionTime":"2025-12-06T15:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI 
configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.764029 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.791851 5003 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-additional-cni-plugins-j4rf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dc41f9e-e763-4a9a-a064-f65bc24332b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://467dcc8ed3936c6fb24f6c6c3e42eccf3597ce6920d12b253946a123ee22faf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\
\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-j4rf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.810338 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9fa121e1-7f2f-4912-945f-86cb199c3014-metrics-certs\") pod \"network-metrics-daemon-jmzd9\" (UID: 
\"9fa121e1-7f2f-4912-945f-86cb199c3014\") " pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:32:41 crc kubenswrapper[5003]: E1206 15:32:41.810563 5003 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 06 15:32:41 crc kubenswrapper[5003]: E1206 15:32:41.810662 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9fa121e1-7f2f-4912-945f-86cb199c3014-metrics-certs podName:9fa121e1-7f2f-4912-945f-86cb199c3014 nodeName:}" failed. No retries permitted until 2025-12-06 15:32:42.810634638 +0000 UTC m=+41.343989059 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9fa121e1-7f2f-4912-945f-86cb199c3014-metrics-certs") pod "network-metrics-daemon-jmzd9" (UID: "9fa121e1-7f2f-4912-945f-86cb199c3014") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.814714 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\
\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' 
detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.836719 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.856043 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.862048 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.862130 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.862157 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.862186 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.862210 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:41Z","lastTransitionTime":"2025-12-06T15:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.877201 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.909366 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a695d94-271c-45bc-8a89-dfdecb57ec00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c6884100d6c3b4944019e7d9e7a2bdaf2013d697ce3980863742a60a7b52d7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c6884100d6c3b4944019e7d9e7a2bdaf2013d697ce3980863742a60a7b52d7f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-06T15:32:39Z\\\",\\\"message\\\":\\\"kipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-marketplace/redhat-marketplace_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/redhat-marketplace\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.140\\\\\\\", Port:50051, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1206 15:32:39.139098 6445 ovnkube.go:599] Stopped ovnkube\\\\nI1206 15:32:39.139178 6445 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1206 15:32:39.139236 6445 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to s\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-p7xwd_openshift-ovn-kubernetes(8a695d94-271c-45bc-8a89-dfdecb57ec00)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7xwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.926140 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9kdpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46c8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9kdpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.940843 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-jmzd9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fa121e1-7f2f-4912-945f-86cb199c3014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9s4lw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9s4lw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:41Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-jmzd9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.963096 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.964223 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.964255 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.964265 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.964301 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.964313 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:41Z","lastTransitionTime":"2025-12-06T15:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.977942 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qcqkl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ae5ee47d26422cffca5f12a4ee1455dcd34c148b1b490d69b88280b4497da11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbhrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qcqkl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:41 crc kubenswrapper[5003]: I1206 15:32:41.994788 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a047c4d-003e-4668-9b96-945eab34ab68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b3a6db6c2c8e6a283f7b46ef28ba7310d3cfc7054cfd8cbcc290ebd0d26dcf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w25db\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:41Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.010330 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7788j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bf147399c10dd7e654abd5213c4d90e8aa9feca7f8c6032c16576a25aeace68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twj7c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47c00cc6d341dbe1a5f3495f04fe4e695370952f6d6b209a5aaff1ace9d17e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twj7c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7788j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:42Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.028006 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:42Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.038057 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cac97fc8a54c2fbd81b0abade11a987e5226084d8a4f75ddbbaad1f6c4cb9783\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:42Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.065912 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.065940 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.065949 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.065961 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.065969 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:42Z","lastTransitionTime":"2025-12-06T15:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.168212 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.168277 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.168288 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.168303 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.168313 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:42Z","lastTransitionTime":"2025-12-06T15:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.270924 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.271027 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.271041 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.271060 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.271072 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:42Z","lastTransitionTime":"2025-12-06T15:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.374089 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.374199 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.374227 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.374259 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.374289 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:42Z","lastTransitionTime":"2025-12-06T15:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.477056 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.477098 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.477110 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.477127 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.477139 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:42Z","lastTransitionTime":"2025-12-06T15:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.579889 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.579940 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.579954 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.579973 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.579987 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:42Z","lastTransitionTime":"2025-12-06T15:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.682737 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.682786 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.682804 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.682829 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.682846 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:42Z","lastTransitionTime":"2025-12-06T15:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.711257 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:32:42 crc kubenswrapper[5003]: E1206 15:32:42.711403 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.785626 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.785679 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.785694 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.785714 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.785730 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:42Z","lastTransitionTime":"2025-12-06T15:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.820967 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9fa121e1-7f2f-4912-945f-86cb199c3014-metrics-certs\") pod \"network-metrics-daemon-jmzd9\" (UID: \"9fa121e1-7f2f-4912-945f-86cb199c3014\") " pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:32:42 crc kubenswrapper[5003]: E1206 15:32:42.821152 5003 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 06 15:32:42 crc kubenswrapper[5003]: E1206 15:32:42.821226 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9fa121e1-7f2f-4912-945f-86cb199c3014-metrics-certs podName:9fa121e1-7f2f-4912-945f-86cb199c3014 nodeName:}" failed. No retries permitted until 2025-12-06 15:32:44.821208254 +0000 UTC m=+43.354562645 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9fa121e1-7f2f-4912-945f-86cb199c3014-metrics-certs") pod "network-metrics-daemon-jmzd9" (UID: "9fa121e1-7f2f-4912-945f-86cb199c3014") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.888093 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.888129 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.888140 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.888158 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.888171 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:42Z","lastTransitionTime":"2025-12-06T15:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.991783 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.991852 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.991874 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.991907 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:42 crc kubenswrapper[5003]: I1206 15:32:42.991929 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:42Z","lastTransitionTime":"2025-12-06T15:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.094616 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.094700 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.094724 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.094754 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.094776 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:43Z","lastTransitionTime":"2025-12-06T15:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.197331 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.197421 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.197457 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.197476 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.197507 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:43Z","lastTransitionTime":"2025-12-06T15:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.300010 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.300082 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.300101 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.300123 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.300138 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:43Z","lastTransitionTime":"2025-12-06T15:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.402829 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.402884 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.402898 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.402914 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.402926 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:43Z","lastTransitionTime":"2025-12-06T15:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.505440 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.505547 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.505576 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.505607 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.505634 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:43Z","lastTransitionTime":"2025-12-06T15:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.608927 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.608998 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.609015 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.609036 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.609050 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:43Z","lastTransitionTime":"2025-12-06T15:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.711433 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.711463 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.711596 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 06 15:32:43 crc kubenswrapper[5003]: E1206 15:32:43.711761 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 06 15:32:43 crc kubenswrapper[5003]: E1206 15:32:43.712020 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 06 15:32:43 crc kubenswrapper[5003]: E1206 15:32:43.712181 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.712190 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.712823 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.712870 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.712905 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.712990 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:43Z","lastTransitionTime":"2025-12-06T15:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.816064 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.816555 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.816775 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.816940 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.817118 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:43Z","lastTransitionTime":"2025-12-06T15:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.920687 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.920901 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.920989 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.921060 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:43 crc kubenswrapper[5003]: I1206 15:32:43.921174 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:43Z","lastTransitionTime":"2025-12-06T15:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.022711 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.022964 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.023205 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.023280 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.023341 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:44Z","lastTransitionTime":"2025-12-06T15:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.126343 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.126390 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.126401 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.126417 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.126430 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:44Z","lastTransitionTime":"2025-12-06T15:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.228473 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.228528 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.228537 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.228549 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.228560 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:44Z","lastTransitionTime":"2025-12-06T15:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.332735 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.332778 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.332790 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.332807 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.332820 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:44Z","lastTransitionTime":"2025-12-06T15:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.435842 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.435989 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.436010 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.436035 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.436052 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:44Z","lastTransitionTime":"2025-12-06T15:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.539168 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.539312 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.539346 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.539438 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.539557 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:44Z","lastTransitionTime":"2025-12-06T15:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.642317 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.642368 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.642385 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.642409 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.642426 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:44Z","lastTransitionTime":"2025-12-06T15:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.711685 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9"
Dec 06 15:32:44 crc kubenswrapper[5003]: E1206 15:32:44.711835 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.744808 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.744854 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.744871 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.744893 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.744910 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:44Z","lastTransitionTime":"2025-12-06T15:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.840981 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9fa121e1-7f2f-4912-945f-86cb199c3014-metrics-certs\") pod \"network-metrics-daemon-jmzd9\" (UID: \"9fa121e1-7f2f-4912-945f-86cb199c3014\") " pod="openshift-multus/network-metrics-daemon-jmzd9"
Dec 06 15:32:44 crc kubenswrapper[5003]: E1206 15:32:44.841152 5003 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 06 15:32:44 crc kubenswrapper[5003]: E1206 15:32:44.841209 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9fa121e1-7f2f-4912-945f-86cb199c3014-metrics-certs podName:9fa121e1-7f2f-4912-945f-86cb199c3014 nodeName:}" failed. No retries permitted until 2025-12-06 15:32:48.841190558 +0000 UTC m=+47.374544949 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9fa121e1-7f2f-4912-945f-86cb199c3014-metrics-certs") pod "network-metrics-daemon-jmzd9" (UID: "9fa121e1-7f2f-4912-945f-86cb199c3014") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.848212 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.848270 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.848288 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.848311 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.848328 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:44Z","lastTransitionTime":"2025-12-06T15:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.951239 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.951341 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.951359 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.951388 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:44 crc kubenswrapper[5003]: I1206 15:32:44.951406 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:44Z","lastTransitionTime":"2025-12-06T15:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.053774 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.053840 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.053859 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.053886 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.053906 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:45Z","lastTransitionTime":"2025-12-06T15:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.084119 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.084165 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.084180 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.084207 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.084224 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:45Z","lastTransitionTime":"2025-12-06T15:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
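The three node-status patch attempts recorded below (15:32:45.104405, .128319, .153085) never reach the API server's store: each PATCH is rejected because the node.network-node-identity.openshift.io admission webhook at https://127.0.0.1:9743/node serves a TLS certificate that expired on 2025-08-24T17:21:41Z, more than three months before the node's current time of 2025-12-06T15:32:45Z. A minimal Go sketch of the validity-window check that fails here (illustrative; crypto/x509 applies this during chain verification rather than as a separate call, and the NotBefore date below is an assumption, only NotAfter appears in the log):

    package main

    import (
    	"crypto/x509"
    	"fmt"
    	"time"
    )

    // A certificate is only valid while NotBefore <= now <= NotAfter.
    // NotAfter and the current time are taken from the log line; the
    // certificate value is a stub for illustration, not the real cert.
    func main() {
    	cert := &x509.Certificate{
    		NotBefore: time.Date(2024, 8, 24, 17, 21, 41, 0, time.UTC), // assumed issue date
    		NotAfter:  time.Date(2025, 8, 24, 17, 21, 41, 0, time.UTC), // from the log
    	}
    	now := time.Date(2025, 12, 6, 15, 32, 45, 0, time.UTC) // current time in the log

    	if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
    		fmt.Printf("x509: certificate has expired or is not yet valid: current time %s is after %s\n",
    			now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
    	}
    }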
Dec 06 15:32:45 crc kubenswrapper[5003]: E1206 15:32:45.104405 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"010ec561-c8bb-454b-ab74-658add58caba\\\",\\\"systemUUID\\\":\\\"42b9e6db-6fd4-4e84-a5b7-176f28bfe2f1\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:45Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.109470 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.109586 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.109608 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.109636 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.109653 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:45Z","lastTransitionTime":"2025-12-06T15:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:45 crc kubenswrapper[5003]: E1206 15:32:45.128319 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"010ec561-c8bb-454b-ab74-658add58caba\\\",\\\"systemUUID\\\":\\\"42b9e6db-6fd4-4e84-a5b7-176f28bfe2f1\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:45Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.134403 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.134456 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.134471 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.134520 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.134543 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:45Z","lastTransitionTime":"2025-12-06T15:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:45 crc kubenswrapper[5003]: E1206 15:32:45.153085 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"010ec561-c8bb-454b-ab74-658add58caba\\\",\\\"systemUUID\\\":\\\"42b9e6db-6fd4-4e84-a5b7-176f28bfe2f1\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:45Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.159906 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.159945 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.159968 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.159988 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.160001 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:45Z","lastTransitionTime":"2025-12-06T15:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:45 crc kubenswrapper[5003]: E1206 15:32:45.177835 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"010ec561-c8bb-454b-ab74-658add58caba\\\",\\\"systemUUID\\\":\\\"42b9e6db-6fd4-4e84-a5b7-176f28bfe2f1\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:45Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.183040 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.183076 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.183085 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.183102 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.183112 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:45Z","lastTransitionTime":"2025-12-06T15:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:45 crc kubenswrapper[5003]: E1206 15:32:45.202145 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"010ec561-c8bb-454b-ab74-658add58caba\\\",\\\"systemUUID\\\":\\\"42b9e6db-6fd4-4e84-a5b7-176f28bfe2f1\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:45Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:45 crc kubenswrapper[5003]: E1206 15:32:45.202340 5003 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.204293 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.204338 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.204347 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.204366 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.204378 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:45Z","lastTransitionTime":"2025-12-06T15:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.312278 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.312352 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.312365 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.312387 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.312405 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:45Z","lastTransitionTime":"2025-12-06T15:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.416231 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.416316 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.416336 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.416363 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.416383 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:45Z","lastTransitionTime":"2025-12-06T15:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.520107 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.520159 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.520178 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.520202 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.520220 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:45Z","lastTransitionTime":"2025-12-06T15:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.623058 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.623557 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.623788 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.624001 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.624176 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:45Z","lastTransitionTime":"2025-12-06T15:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.712235 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.712285 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.712518 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:32:45 crc kubenswrapper[5003]: E1206 15:32:45.712557 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:32:45 crc kubenswrapper[5003]: E1206 15:32:45.712624 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:32:45 crc kubenswrapper[5003]: E1206 15:32:45.712792 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.729733 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.729800 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.729822 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.729847 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.729869 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:45Z","lastTransitionTime":"2025-12-06T15:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.832800 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.833137 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.833358 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.833612 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.833808 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:45Z","lastTransitionTime":"2025-12-06T15:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.938682 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.938783 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.938798 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.938828 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:45 crc kubenswrapper[5003]: I1206 15:32:45.938846 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:45Z","lastTransitionTime":"2025-12-06T15:32:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.041074 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.041159 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.041179 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.041200 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.041216 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:46Z","lastTransitionTime":"2025-12-06T15:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.143930 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.143968 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.143981 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.143996 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.144007 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:46Z","lastTransitionTime":"2025-12-06T15:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.246341 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.246413 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.246433 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.246456 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.246473 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:46Z","lastTransitionTime":"2025-12-06T15:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.349766 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.349804 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.349813 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.349828 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.349842 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:46Z","lastTransitionTime":"2025-12-06T15:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.452560 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.452608 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.452622 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.452641 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.452654 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:46Z","lastTransitionTime":"2025-12-06T15:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.556201 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.556252 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.556262 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.556280 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.556290 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:46Z","lastTransitionTime":"2025-12-06T15:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.659119 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.659172 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.659183 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.659198 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.659207 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:46Z","lastTransitionTime":"2025-12-06T15:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.712201 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:32:46 crc kubenswrapper[5003]: E1206 15:32:46.712341 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.762394 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.762464 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.762476 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.762529 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.762542 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:46Z","lastTransitionTime":"2025-12-06T15:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.865193 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.865244 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.865255 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.865326 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.865337 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:46Z","lastTransitionTime":"2025-12-06T15:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.968605 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.968646 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.968660 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.968676 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:46 crc kubenswrapper[5003]: I1206 15:32:46.968687 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:46Z","lastTransitionTime":"2025-12-06T15:32:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.071266 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.071331 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.071343 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.071360 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.071372 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:47Z","lastTransitionTime":"2025-12-06T15:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.174560 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.174610 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.174635 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.174665 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.174687 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:47Z","lastTransitionTime":"2025-12-06T15:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.277096 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.277158 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.277178 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.277206 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.277230 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:47Z","lastTransitionTime":"2025-12-06T15:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.380084 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.380140 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.380157 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.380180 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.380198 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:47Z","lastTransitionTime":"2025-12-06T15:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.484479 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.484574 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.484591 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.484618 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.484636 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:47Z","lastTransitionTime":"2025-12-06T15:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.586859 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.586925 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.586947 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.586967 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.586980 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:47Z","lastTransitionTime":"2025-12-06T15:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.689748 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.689801 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.689844 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.689868 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.689883 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:47Z","lastTransitionTime":"2025-12-06T15:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.712401 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 06 15:32:47 crc kubenswrapper[5003]: E1206 15:32:47.712596 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.712717 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.712731 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 06 15:32:47 crc kubenswrapper[5003]: E1206 15:32:47.712956 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 06 15:32:47 crc kubenswrapper[5003]: E1206 15:32:47.712992 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.793350 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.793403 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.793414 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.793433 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.793445 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:47Z","lastTransitionTime":"2025-12-06T15:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.896606 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.896705 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.896726 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.896748 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:47 crc kubenswrapper[5003]: I1206 15:32:47.896765 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:47Z","lastTransitionTime":"2025-12-06T15:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.000091 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.000120 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.000128 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.000142 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.000151 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:48Z","lastTransitionTime":"2025-12-06T15:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.103323 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.103396 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.103417 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.103445 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.103464 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:48Z","lastTransitionTime":"2025-12-06T15:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.206283 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.206374 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.206397 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.206422 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.206441 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:48Z","lastTransitionTime":"2025-12-06T15:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.308953 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.309016 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.309035 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.309062 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.309080 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:48Z","lastTransitionTime":"2025-12-06T15:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.411984 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.412048 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.412066 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.412090 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.412194 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:48Z","lastTransitionTime":"2025-12-06T15:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.514903 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.514971 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.514988 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.515013 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.515031 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:48Z","lastTransitionTime":"2025-12-06T15:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.618615 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.618690 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.618714 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.618746 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.618770 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:48Z","lastTransitionTime":"2025-12-06T15:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.711663 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9"
Dec 06 15:32:48 crc kubenswrapper[5003]: E1206 15:32:48.711907 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014"
Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.722288 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.722340 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.722352 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.722366 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.722376 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:48Z","lastTransitionTime":"2025-12-06T15:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.825595 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.825657 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.825674 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.825696 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.825713 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:48Z","lastTransitionTime":"2025-12-06T15:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.889532 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9fa121e1-7f2f-4912-945f-86cb199c3014-metrics-certs\") pod \"network-metrics-daemon-jmzd9\" (UID: \"9fa121e1-7f2f-4912-945f-86cb199c3014\") " pod="openshift-multus/network-metrics-daemon-jmzd9"
Dec 06 15:32:48 crc kubenswrapper[5003]: E1206 15:32:48.889711 5003 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 06 15:32:48 crc kubenswrapper[5003]: E1206 15:32:48.889783 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9fa121e1-7f2f-4912-945f-86cb199c3014-metrics-certs podName:9fa121e1-7f2f-4912-945f-86cb199c3014 nodeName:}" failed. No retries permitted until 2025-12-06 15:32:56.889764696 +0000 UTC m=+55.423119087 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9fa121e1-7f2f-4912-945f-86cb199c3014-metrics-certs") pod "network-metrics-daemon-jmzd9" (UID: "9fa121e1-7f2f-4912-945f-86cb199c3014") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.928092 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.928164 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.928181 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.928208 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:48 crc kubenswrapper[5003]: I1206 15:32:48.928226 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:48Z","lastTransitionTime":"2025-12-06T15:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.030607 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.030680 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.030704 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.030734 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.030774 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:49Z","lastTransitionTime":"2025-12-06T15:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.133562 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.133608 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.133619 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.133636 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.133647 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:49Z","lastTransitionTime":"2025-12-06T15:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.235848 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.235914 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.235937 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.235969 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.235992 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:49Z","lastTransitionTime":"2025-12-06T15:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.337936 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.337998 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.338016 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.338040 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.338065 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:49Z","lastTransitionTime":"2025-12-06T15:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.440475 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.440543 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.440554 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.440572 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.440585 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:49Z","lastTransitionTime":"2025-12-06T15:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.543087 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.543119 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.543132 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.543147 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.543157 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:49Z","lastTransitionTime":"2025-12-06T15:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.648025 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.648080 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.648104 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.648128 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.648144 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:49Z","lastTransitionTime":"2025-12-06T15:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.712215 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.712299 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.712230 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 06 15:32:49 crc kubenswrapper[5003]: E1206 15:32:49.712464 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 06 15:32:49 crc kubenswrapper[5003]: E1206 15:32:49.712601 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 06 15:32:49 crc kubenswrapper[5003]: E1206 15:32:49.712711 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.750629 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.750680 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.750695 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.750718 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.750734 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:49Z","lastTransitionTime":"2025-12-06T15:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.853877 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.853933 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.853945 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.853963 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.853979 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:49Z","lastTransitionTime":"2025-12-06T15:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.959038 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.959103 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.959120 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.959141 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:49 crc kubenswrapper[5003]: I1206 15:32:49.959157 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:49Z","lastTransitionTime":"2025-12-06T15:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.060935 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.060983 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.060994 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.061012 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.061025 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:50Z","lastTransitionTime":"2025-12-06T15:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.164017 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.164087 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.164104 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.164129 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.164143 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:50Z","lastTransitionTime":"2025-12-06T15:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.266389 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.266538 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.266551 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.266564 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.266571 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:50Z","lastTransitionTime":"2025-12-06T15:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.369080 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.369151 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.369170 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.369193 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.369209 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:50Z","lastTransitionTime":"2025-12-06T15:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.471202 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.471241 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.471251 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.471267 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.471278 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:50Z","lastTransitionTime":"2025-12-06T15:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.574658 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.574735 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.574758 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.574790 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.574813 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:50Z","lastTransitionTime":"2025-12-06T15:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.676801 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.676867 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.676882 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.676902 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.676916 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:50Z","lastTransitionTime":"2025-12-06T15:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.712268 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9"
Dec 06 15:32:50 crc kubenswrapper[5003]: E1206 15:32:50.712413 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014"
Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.779807 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.779848 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.779856 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.779871 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.779881 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:50Z","lastTransitionTime":"2025-12-06T15:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.882109 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.882159 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.882172 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.882188 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.882196 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:50Z","lastTransitionTime":"2025-12-06T15:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.984792 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.984850 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.984861 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.984878 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:50 crc kubenswrapper[5003]: I1206 15:32:50.984887 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:50Z","lastTransitionTime":"2025-12-06T15:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.087775 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.087848 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.087864 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.087890 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.087908 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:51Z","lastTransitionTime":"2025-12-06T15:32:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.190382 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.190416 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.190432 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.190449 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.190460 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:51Z","lastTransitionTime":"2025-12-06T15:32:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.293767 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.293850 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.293867 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.293889 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.293905 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:51Z","lastTransitionTime":"2025-12-06T15:32:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.397552 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.397630 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.397656 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.397686 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.397708 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:51Z","lastTransitionTime":"2025-12-06T15:32:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.500341 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.500408 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.500425 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.500450 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.500470 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:51Z","lastTransitionTime":"2025-12-06T15:32:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.602810 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.602874 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.602896 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.602930 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.602954 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:51Z","lastTransitionTime":"2025-12-06T15:32:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.705594 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.705711 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.705729 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.705753 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.705771 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:51Z","lastTransitionTime":"2025-12-06T15:32:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.712107 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 06 15:32:51 crc kubenswrapper[5003]: E1206 15:32:51.712302 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.712319 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.712370 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 06 15:32:51 crc kubenswrapper[5003]: E1206 15:32:51.712538 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 06 15:32:51 crc kubenswrapper[5003]: E1206 15:32:51.713966 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.714414 5003 scope.go:117] "RemoveContainer" containerID="4c6884100d6c3b4944019e7d9e7a2bdaf2013d697ce3980863742a60a7b52d7f"
Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.738839 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:51Z is after 2025-08-24T17:21:41Z"
Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.756595 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:51Z is after 2025-08-24T17:21:41Z"
Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.785380 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a695d94-271c-45bc-8a89-dfdecb57ec00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c6884100d6c3b4944019e7d9e7a2bdaf2013d697ce3980863742a60a7b52d7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c6884100d6c3b4944019e7d9e7a2bdaf2013d697ce3980863742a60a7b52d7f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-06T15:32:39Z\\\",\\\"message\\\":\\\"kipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-marketplace/redhat-marketplace_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/redhat-marketplace\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.140\\\\\\\", Port:50051, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1206 15:32:39.139098 6445 ovnkube.go:599] Stopped ovnkube\\\\nI1206 15:32:39.139178 6445 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1206 15:32:39.139236 6445 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to s\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-p7xwd_openshift-ovn-kubernetes(8a695d94-271c-45bc-8a89-dfdecb57ec00)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7xwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:51Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.803359 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-jmzd9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fa121e1-7f2f-4912-945f-86cb199c3014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9s4lw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9s4lw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:41Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-jmzd9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:51Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.809338 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.809386 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.809395 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.809410 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.809419 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:51Z","lastTransitionTime":"2025-12-06T15:32:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.821923 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:51Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.859349 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qcqkl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ae5ee47d26422cffca5f12a4ee1455dcd34c148b1b490d69b88280b4497da11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbhrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qcqkl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:51Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.887704 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9kdpn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46c8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9kdpn\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:51Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.901029 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7788j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bf147399c10dd7e654abd5213c4d90e8aa9feca7f8c6032c16576a25aeace68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twj7c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47c00cc6d341dbe1a5f3495f04fe4e695370952f6d6b209a5aaff1ace9d17e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twj7c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7788j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:51Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.911788 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.911832 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.911841 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.911859 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.911871 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:51Z","lastTransitionTime":"2025-12-06T15:32:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.912461 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:51Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.921558 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cac97fc8a54c2fbd81b0abade11a987e5226084d8a4f75ddbbaad1f6c4cb9783\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:51Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.933126 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a047c4d-003e-4668-9b96-945eab34ab68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b3a6db6c2c8e6a283f7b46ef28ba7310d3cfc7054cfd8cbcc290ebd0d26dcf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-w25db\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:51Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.951136 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:51Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:51 crc kubenswrapper[5003]: I1206 15:32:51.965942 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dc41f9e-e763-4a9a-a064-f65bc24332b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://467dcc8ed3936c6fb24f6c6c3e42eccf3597ce6920d12b253946a123ee22faf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"c
ontainerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:32Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-j4rf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:51Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:51 crc kubenswrapper[5003]: 
I1206 15:32:51.985124 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerI
D\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:51Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.000123 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:51Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.013890 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.013920 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.013928 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.013940 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.013948 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:52Z","lastTransitionTime":"2025-12-06T15:32:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.017649 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:52Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.037197 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7xwd_8a695d94-271c-45bc-8a89-dfdecb57ec00/ovnkube-controller/1.log" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.039442 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" event={"ID":"8a695d94-271c-45bc-8a89-dfdecb57ec00","Type":"ContainerStarted","Data":"2f0ad17b1a040807cb7cca5ee53ef54da1870f63cadccb3aa20421b08016cf10"} Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.039915 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.051974 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:52Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.062317 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:52Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.086186 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a695d94-271c-45bc-8a89-dfdecb57ec00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f0ad17b1a040807cb7cca5ee53ef54da1870f63
cadccb3aa20421b08016cf10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c6884100d6c3b4944019e7d9e7a2bdaf2013d697ce3980863742a60a7b52d7f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-06T15:32:39Z\\\",\\\"message\\\":\\\"kipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-marketplace/redhat-marketplace_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/redhat-marketplace\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.140\\\\\\\", Port:50051, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1206 15:32:39.139098 6445 ovnkube.go:599] Stopped ovnkube\\\\nI1206 15:32:39.139178 6445 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1206 15:32:39.139236 6445 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to 
s\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"cont
ainerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7xwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:52Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.103298 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:52Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.116330 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.116386 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.116396 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.116413 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.116422 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:52Z","lastTransitionTime":"2025-12-06T15:32:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.118556 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qcqkl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ae5ee47d26422cffca5f12a4ee1455dcd34c148b1b490d69b88280b4497da11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbhrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qcqkl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:52Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.133717 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9kdpn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46c8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9kdpn\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:52Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.146867 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-jmzd9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fa121e1-7f2f-4912-945f-86cb199c3014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9s4lw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9s4lw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:41Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-jmzd9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:52Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:52 crc 
kubenswrapper[5003]: I1206 15:32:52.187260 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:52Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.198252 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cac97fc8a54c2fbd81b0abade11a987e5226084d8a4f75ddbbaad1f6c4cb9783\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:52Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.210712 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a047c4d-003e-4668-9b96-945eab34ab68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b3a6db6c2c8e6a283f7b46ef28ba7310d3cfc7054cfd8cbcc290ebd0d26dcf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w25db\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:52Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.218537 5003 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.218576 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.218587 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.218602 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.218613 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:52Z","lastTransitionTime":"2025-12-06T15:32:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.223934 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7788j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bf147399c10dd7e654abd5213c4d90e8aa9feca7f8c6032c16576a25aeace68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twj7c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47c00cc6d341dbe1a5f3495f04fe4e695370952f6d6b209a5aaff1ace9d17e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twj7c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7788j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:52Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.240634 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"qua
y.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' 
detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:52Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.256874 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:52Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.269072 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:52Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.282997 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:52Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.300855 5003 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-additional-cni-plugins-j4rf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dc41f9e-e763-4a9a-a064-f65bc24332b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://467dcc8ed3936c6fb24f6c6c3e42eccf3597ce6920d12b253946a123ee22faf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\
\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-j4rf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:52Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.321447 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.321517 5003 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.321530 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.321549 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.321560 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:52Z","lastTransitionTime":"2025-12-06T15:32:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.423837 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.423882 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.423892 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.423919 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.423931 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:52Z","lastTransitionTime":"2025-12-06T15:32:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.526181 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.526300 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.526326 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.526352 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.526371 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:52Z","lastTransitionTime":"2025-12-06T15:32:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.629180 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.629225 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.629235 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.629253 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.629265 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:52Z","lastTransitionTime":"2025-12-06T15:32:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.711808 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:32:52 crc kubenswrapper[5003]: E1206 15:32:52.711990 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.731809 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.731858 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.731870 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.731887 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.731900 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:52Z","lastTransitionTime":"2025-12-06T15:32:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.833871 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.833900 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.833908 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.833922 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.833930 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:52Z","lastTransitionTime":"2025-12-06T15:32:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.936101 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.936136 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.936145 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.936158 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:52 crc kubenswrapper[5003]: I1206 15:32:52.936166 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:52Z","lastTransitionTime":"2025-12-06T15:32:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.038819 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.038846 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.038856 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.038867 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.038876 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:53Z","lastTransitionTime":"2025-12-06T15:32:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.140554 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.140604 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.140616 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.140638 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.140651 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:53Z","lastTransitionTime":"2025-12-06T15:32:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.243097 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.243148 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.243161 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.243183 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.243195 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:53Z","lastTransitionTime":"2025-12-06T15:32:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.346143 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.346212 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.346235 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.346263 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.346286 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:53Z","lastTransitionTime":"2025-12-06T15:32:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.449147 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.449226 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.449249 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.449277 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.449301 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:53Z","lastTransitionTime":"2025-12-06T15:32:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.552317 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.552407 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.552437 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.552469 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.552531 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:53Z","lastTransitionTime":"2025-12-06T15:32:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.645108 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.645242 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.645302 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 06 15:32:53 crc kubenswrapper[5003]: E1206 15:32:53.645408 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:33:25.645371645 +0000 UTC m=+84.178726066 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 06 15:32:53 crc kubenswrapper[5003]: E1206 15:32:53.645436 5003 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.645552 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 06 15:32:53 crc kubenswrapper[5003]: E1206 15:32:53.645556 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 06 15:32:53 crc kubenswrapper[5003]: E1206 15:32:53.645654 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 06 15:32:53 crc kubenswrapper[5003]: E1206 15:32:53.645680 5003 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 06 15:32:53 crc kubenswrapper[5003]: E1206 15:32:53.645574 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-06 15:33:25.64554686 +0000 UTC m=+84.178901281 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.645774 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 06 15:32:53 crc kubenswrapper[5003]: E1206 15:32:53.645779 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 06 15:32:53 crc kubenswrapper[5003]: E1206 15:32:53.645851 5003 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 06 15:32:53 crc kubenswrapper[5003]: E1206 15:32:53.645850 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-06 15:33:25.645814707 +0000 UTC m=+84.179169128 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 06 15:32:53 crc kubenswrapper[5003]: E1206 15:32:53.645965 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-06 15:33:25.645944201 +0000 UTC m=+84.179298592 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 06 15:32:53 crc kubenswrapper[5003]: E1206 15:32:53.645854 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 06 15:32:53 crc kubenswrapper[5003]: E1206 15:32:53.645987 5003 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 06 15:32:53 crc kubenswrapper[5003]: E1206 15:32:53.646032 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-06 15:33:25.646023464 +0000 UTC m=+84.179377865 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.655032 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.655084 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.655100 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.655123 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.655140 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:53Z","lastTransitionTime":"2025-12-06T15:32:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.712432 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.712542 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 06 15:32:53 crc kubenswrapper[5003]: E1206 15:32:53.712701 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.712729 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 06 15:32:53 crc kubenswrapper[5003]: E1206 15:32:53.712895 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 06 15:32:53 crc kubenswrapper[5003]: E1206 15:32:53.713083 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.757660 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.757714 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.757732 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.757753 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.757770 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:53Z","lastTransitionTime":"2025-12-06T15:32:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.860536 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.860585 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.860598 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.860618 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.860632 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:53Z","lastTransitionTime":"2025-12-06T15:32:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.963633 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.963687 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.963700 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.963715 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:53 crc kubenswrapper[5003]: I1206 15:32:53.963726 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:53Z","lastTransitionTime":"2025-12-06T15:32:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.053613 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7xwd_8a695d94-271c-45bc-8a89-dfdecb57ec00/ovnkube-controller/2.log"
Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.055198 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7xwd_8a695d94-271c-45bc-8a89-dfdecb57ec00/ovnkube-controller/1.log"
Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.058386 5003 generic.go:334] "Generic (PLEG): container finished" podID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerID="2f0ad17b1a040807cb7cca5ee53ef54da1870f63cadccb3aa20421b08016cf10" exitCode=1
Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.058429 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" event={"ID":"8a695d94-271c-45bc-8a89-dfdecb57ec00","Type":"ContainerDied","Data":"2f0ad17b1a040807cb7cca5ee53ef54da1870f63cadccb3aa20421b08016cf10"}
Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.058467 5003 scope.go:117] "RemoveContainer" containerID="4c6884100d6c3b4944019e7d9e7a2bdaf2013d697ce3980863742a60a7b52d7f"
Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.059563 5003 scope.go:117] "RemoveContainer" containerID="2f0ad17b1a040807cb7cca5ee53ef54da1870f63cadccb3aa20421b08016cf10"
Dec 06 15:32:54 crc kubenswrapper[5003]: E1206 15:32:54.059809 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-p7xwd_openshift-ovn-kubernetes(8a695d94-271c-45bc-8a89-dfdecb57ec00)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00"
Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.065710 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.065745 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.065764 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.065788 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.065806 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:54Z","lastTransitionTime":"2025-12-06T15:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.075501 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:54Z is after 2025-08-24T17:21:41Z"
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:54Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.104288 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:54Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.124780 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:54Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.141967 5003 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-additional-cni-plugins-j4rf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dc41f9e-e763-4a9a-a064-f65bc24332b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://467dcc8ed3936c6fb24f6c6c3e42eccf3597ce6920d12b253946a123ee22faf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\
\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-j4rf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:54Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.158017 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:54Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.169352 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.169420 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.169439 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.169548 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.169569 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:54Z","lastTransitionTime":"2025-12-06T15:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.176305 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:54Z is after 2025-08-24T17:21:41Z"
Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.197444 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a695d94-271c-45bc-8a89-dfdecb57ec00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f0ad17b1a040807cb7cca5ee53ef54da1870f63cadccb3aa20421b08016cf10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c6884100d6c3b4944019e7d9e7a2bdaf2013d697ce3980863742a60a7b52d7f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-06T15:32:39Z\\\",\\\"message\\\":\\\"kipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-marketplace/redhat-marketplace_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/redhat-marketplace\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.140\\\\\\\", Port:50051, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1206 15:32:39.139098 6445 ovnkube.go:599] Stopped ovnkube\\\\nI1206 15:32:39.139178 6445 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1206 15:32:39.139236 6445 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to s\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f0ad17b1a040807cb7cca5ee53ef54da1870f63cadccb3aa20421b08016cf10\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-06T15:32:53Z\\\",\\\"message\\\":\\\"d44-bbd8-dba87b7dbaf0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1206 15:32:53.311445 6646 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-j4rf7 in node crc\\\\nI1206 15:32:53.311457 6646 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-j4rf7 after 0 failed attempt(s)\\\\nI1206 15:32:53.311473 6646 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-j4rf7\\\\nF1206 15:32:53.311504 6646 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could 
not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:53Z is after 2025-08-24\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"r
un-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7xwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:54Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.220202 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:54Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.233060 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qcqkl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ae5ee47d26422cffca5f12a4ee1455dcd34c148b1b490d69b88280b4497da11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbhrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qcqkl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-06T15:32:54Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.249176 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9kdpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46c8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9kdpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:54Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.259353 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-jmzd9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fa121e1-7f2f-4912-945f-86cb199c3014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9s4lw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9s4lw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:41Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-jmzd9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: 
Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:54Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.271834 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.271862 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.271870 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.271884 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.271893 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:54Z","lastTransitionTime":"2025-12-06T15:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.272168 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:54Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.287727 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cac97fc8a54c2fbd81b0abade11a987e5226084d8a4f75ddbbaad1f6c4cb9783\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:54Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.301640 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a047c4d-003e-4668-9b96-945eab34ab68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b3a6db6c2c8e6a283f7b46ef28ba7310d3cfc7054cfd8cbcc290ebd0d26dcf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-w25db\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:54Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.311740 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7788j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bf147399c10dd7e654abd5213c4d90e8aa9feca7f8c6032c16576a25aeace68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twj7c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47c00cc6d341dbe1a5f3495f04fe4e695370952f6d6b209a5aaff1ace9d17e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twj7c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7788j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:54Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.374263 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.374329 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.374350 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.374373 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.374388 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:54Z","lastTransitionTime":"2025-12-06T15:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.476735 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.476783 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.476793 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.476808 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.476819 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:54Z","lastTransitionTime":"2025-12-06T15:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.579329 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.579366 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.579375 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.579388 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.579397 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:54Z","lastTransitionTime":"2025-12-06T15:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.682399 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.682472 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.682542 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.682576 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.682597 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:54Z","lastTransitionTime":"2025-12-06T15:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.711783 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:32:54 crc kubenswrapper[5003]: E1206 15:32:54.711947 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.785206 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.785276 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.785299 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.785328 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.785351 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:54Z","lastTransitionTime":"2025-12-06T15:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.887957 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.888035 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.888070 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.888096 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.888115 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:54Z","lastTransitionTime":"2025-12-06T15:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.990796 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.990852 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.990868 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.990890 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:54 crc kubenswrapper[5003]: I1206 15:32:54.990906 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:54Z","lastTransitionTime":"2025-12-06T15:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.063896 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7xwd_8a695d94-271c-45bc-8a89-dfdecb57ec00/ovnkube-controller/2.log" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.093118 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.093180 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.093192 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.093205 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.093216 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:55Z","lastTransitionTime":"2025-12-06T15:32:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.195668 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.195734 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.195753 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.195778 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.195795 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:55Z","lastTransitionTime":"2025-12-06T15:32:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.299996 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.300076 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.300100 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.300130 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.300151 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:55Z","lastTransitionTime":"2025-12-06T15:32:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.376452 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.376561 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.376598 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.376629 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.376653 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:55Z","lastTransitionTime":"2025-12-06T15:32:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:55 crc kubenswrapper[5003]: E1206 15:32:55.400152 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"010ec561-c8bb-454b-ab74-658add58caba\\\",\\\"systemUUID\\\":\\\"42b9e6db-6fd4-4e84-a5b7-176f28bfe2f1\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:55Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.407108 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.407172 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.407188 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.407214 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.407230 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:55Z","lastTransitionTime":"2025-12-06T15:32:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:55 crc kubenswrapper[5003]: E1206 15:32:55.429371 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"010ec561-c8bb-454b-ab74-658add58caba\\\",\\\"systemUUID\\\":\\\"42b9e6db-6fd4-4e84-a5b7-176f28bfe2f1\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:55Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.434648 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.434732 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.434764 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.434796 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.434818 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:55Z","lastTransitionTime":"2025-12-06T15:32:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:55 crc kubenswrapper[5003]: E1206 15:32:55.454921 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"010ec561-c8bb-454b-ab74-658add58caba\\\",\\\"systemUUID\\\":\\\"42b9e6db-6fd4-4e84-a5b7-176f28bfe2f1\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:55Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.459776 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.459829 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.459845 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.459868 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.459886 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:55Z","lastTransitionTime":"2025-12-06T15:32:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:55 crc kubenswrapper[5003]: E1206 15:32:55.481815 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"010ec561-c8bb-454b-ab74-658add58caba\\\",\\\"systemUUID\\\":\\\"42b9e6db-6fd4-4e84-a5b7-176f28bfe2f1\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:55Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.486386 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.486439 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.486457 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.486480 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.486528 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:55Z","lastTransitionTime":"2025-12-06T15:32:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:55 crc kubenswrapper[5003]: E1206 15:32:55.511630 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"010ec561-c8bb-454b-ab74-658add58caba\\\",\\\"systemUUID\\\":\\\"42b9e6db-6fd4-4e84-a5b7-176f28bfe2f1\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:55Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:55 crc kubenswrapper[5003]: E1206 15:32:55.511782 5003 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.513649 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.513694 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.513710 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.513732 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.513748 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:55Z","lastTransitionTime":"2025-12-06T15:32:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.524367 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.538667 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.541776 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:55Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.555463 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cac97fc8a54c2fbd81b0abade11a987e5226084d8a4f75ddbbaad1f6c4cb9783\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:55Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.570679 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a047c4d-003e-4668-9b96-945eab34ab68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b3a6db6c2c8e6a283f7b46ef28ba7310d3cfc7054cfd8cbcc290ebd0d26dcf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-w25db\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:55Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.589210 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7788j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bf147399c10dd7e654abd5213c4d90e8aa9feca7f8c6032c16576a25aeace68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twj7c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47c00cc6d341dbe1a5f3495f04fe4e695370952f6d6b209a5aaff1ace9d17e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twj7c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7788j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:55Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.602078 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b827
99488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:55Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.615345 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:55Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.616705 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.616837 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.617116 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.617231 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.617425 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:55Z","lastTransitionTime":"2025-12-06T15:32:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.629521 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:55Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.642307 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dc41f9e-e763-4a9a-a064-f65bc24332b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://467dcc8ed3936c6fb24f6c6c3e42eccf3597ce6920d12b253946a123ee22faf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-j4rf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:55Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.658918 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:55Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.667770 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:55Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.683401 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a695d94-271c-45bc-8a89-dfdecb57ec00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f0ad17b1a040807cb7cca5ee53ef54da1870f63
cadccb3aa20421b08016cf10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c6884100d6c3b4944019e7d9e7a2bdaf2013d697ce3980863742a60a7b52d7f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-06T15:32:39Z\\\",\\\"message\\\":\\\"kipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-marketplace/redhat-marketplace_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/redhat-marketplace\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.140\\\\\\\", Port:50051, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1206 15:32:39.139098 6445 ovnkube.go:599] Stopped ovnkube\\\\nI1206 15:32:39.139178 6445 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1206 15:32:39.139236 6445 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to s\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f0ad17b1a040807cb7cca5ee53ef54da1870f63cadccb3aa20421b08016cf10\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-06T15:32:53Z\\\",\\\"message\\\":\\\"d44-bbd8-dba87b7dbaf0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1206 15:32:53.311445 6646 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-j4rf7 in node crc\\\\nI1206 15:32:53.311457 6646 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-j4rf7 after 0 failed attempt(s)\\\\nI1206 15:32:53.311473 6646 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-j4rf7\\\\nF1206 15:32:53.311504 6646 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default 
node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:53Z is after 2025-08-24\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\
\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7xwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:55Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.696594 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:55Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.707442 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:55Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.713626 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:32:55 crc kubenswrapper[5003]: E1206 15:32:55.714002 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.713783 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:32:55 crc kubenswrapper[5003]: E1206 15:32:55.714238 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.713711 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:32:55 crc kubenswrapper[5003]: E1206 15:32:55.714448 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.720804 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.721010 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.721125 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.721224 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.721314 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:55Z","lastTransitionTime":"2025-12-06T15:32:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.723671 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qcqkl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ae5ee47d26422cffca5f12a4ee1455dcd34c148b1b490d69b88280b4497da11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbhrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qcqkl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:55Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.736338 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9kdpn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46c8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9kdpn\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:55Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.749613 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-jmzd9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fa121e1-7f2f-4912-945f-86cb199c3014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9s4lw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9s4lw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:41Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-jmzd9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:55Z is after 2025-08-24T17:21:41Z" Dec 06 15:32:55 crc 
kubenswrapper[5003]: I1206 15:32:55.824046 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.824102 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.824117 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.824137 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.824150 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:55Z","lastTransitionTime":"2025-12-06T15:32:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.926757 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.926821 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.926847 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.926877 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:55 crc kubenswrapper[5003]: I1206 15:32:55.926901 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:55Z","lastTransitionTime":"2025-12-06T15:32:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.030021 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.030324 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.030524 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.030711 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.030862 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:56Z","lastTransitionTime":"2025-12-06T15:32:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.133194 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.133224 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.133232 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.133245 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.133256 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:56Z","lastTransitionTime":"2025-12-06T15:32:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.235920 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.235982 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.236007 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.236036 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.236059 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:56Z","lastTransitionTime":"2025-12-06T15:32:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.338307 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.338342 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.338352 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.338367 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.338379 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:56Z","lastTransitionTime":"2025-12-06T15:32:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.440416 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.440479 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.440550 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.440587 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.440610 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:56Z","lastTransitionTime":"2025-12-06T15:32:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.543804 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.543863 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.543877 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.543894 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.543904 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:56Z","lastTransitionTime":"2025-12-06T15:32:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.647179 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.647252 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.647278 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.647310 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.647333 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:56Z","lastTransitionTime":"2025-12-06T15:32:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.712159 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:32:56 crc kubenswrapper[5003]: E1206 15:32:56.712330 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.750709 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.750949 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.751076 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.751176 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.751317 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:56Z","lastTransitionTime":"2025-12-06T15:32:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.853806 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.853890 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.853909 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.853938 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.853956 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:56Z","lastTransitionTime":"2025-12-06T15:32:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.956787 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.956845 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.956863 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.956888 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.956905 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:56Z","lastTransitionTime":"2025-12-06T15:32:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:56 crc kubenswrapper[5003]: I1206 15:32:56.981430 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9fa121e1-7f2f-4912-945f-86cb199c3014-metrics-certs\") pod \"network-metrics-daemon-jmzd9\" (UID: \"9fa121e1-7f2f-4912-945f-86cb199c3014\") " pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:32:56 crc kubenswrapper[5003]: E1206 15:32:56.981666 5003 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 06 15:32:56 crc kubenswrapper[5003]: E1206 15:32:56.981818 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9fa121e1-7f2f-4912-945f-86cb199c3014-metrics-certs podName:9fa121e1-7f2f-4912-945f-86cb199c3014 nodeName:}" failed. No retries permitted until 2025-12-06 15:33:12.981748593 +0000 UTC m=+71.515103024 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9fa121e1-7f2f-4912-945f-86cb199c3014-metrics-certs") pod "network-metrics-daemon-jmzd9" (UID: "9fa121e1-7f2f-4912-945f-86cb199c3014") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.059460 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.059537 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.059548 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.059562 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.059572 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:57Z","lastTransitionTime":"2025-12-06T15:32:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.162332 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.162378 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.162389 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.162405 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.162419 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:57Z","lastTransitionTime":"2025-12-06T15:32:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.264856 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.264910 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.264922 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.264939 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.264951 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:57Z","lastTransitionTime":"2025-12-06T15:32:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.367532 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.367589 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.367597 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.367610 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.367641 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:57Z","lastTransitionTime":"2025-12-06T15:32:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.469889 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.469924 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.469935 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.469951 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.469964 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:57Z","lastTransitionTime":"2025-12-06T15:32:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.573271 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.573330 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.573344 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.573363 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.573375 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:57Z","lastTransitionTime":"2025-12-06T15:32:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.675677 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.675758 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.675780 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.675805 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.675822 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:57Z","lastTransitionTime":"2025-12-06T15:32:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.711279 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.711342 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:32:57 crc kubenswrapper[5003]: E1206 15:32:57.711386 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.711351 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:32:57 crc kubenswrapper[5003]: E1206 15:32:57.711465 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:32:57 crc kubenswrapper[5003]: E1206 15:32:57.711700 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.778589 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.778646 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.778661 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.778681 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.778695 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:57Z","lastTransitionTime":"2025-12-06T15:32:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.881651 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.881726 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.881751 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.881781 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.881802 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:57Z","lastTransitionTime":"2025-12-06T15:32:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.984639 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.984783 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.984804 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.984830 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:57 crc kubenswrapper[5003]: I1206 15:32:57.984850 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:57Z","lastTransitionTime":"2025-12-06T15:32:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.091101 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.091139 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.091148 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.091183 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.091199 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:58Z","lastTransitionTime":"2025-12-06T15:32:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.193668 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.193721 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.193732 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.193748 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.193760 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:58Z","lastTransitionTime":"2025-12-06T15:32:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.295775 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.295943 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.295963 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.295984 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.296000 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:58Z","lastTransitionTime":"2025-12-06T15:32:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.397925 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.397970 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.397981 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.397998 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.398012 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:58Z","lastTransitionTime":"2025-12-06T15:32:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.500812 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.500861 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.500875 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.500892 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.500905 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:58Z","lastTransitionTime":"2025-12-06T15:32:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.603187 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.603230 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.603242 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.603259 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.603269 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:58Z","lastTransitionTime":"2025-12-06T15:32:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.705122 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.705175 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.705212 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.705231 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.705242 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:58Z","lastTransitionTime":"2025-12-06T15:32:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.711817 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9"
Dec 06 15:32:58 crc kubenswrapper[5003]: E1206 15:32:58.712010 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.807615 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.807661 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.807672 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.807689 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.807700 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:58Z","lastTransitionTime":"2025-12-06T15:32:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.910275 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.910372 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.910390 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.910414 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:58 crc kubenswrapper[5003]: I1206 15:32:58.910431 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:58Z","lastTransitionTime":"2025-12-06T15:32:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.012795 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.012833 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.012845 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.012860 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.012870 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:59Z","lastTransitionTime":"2025-12-06T15:32:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.115833 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.115872 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.115885 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.115912 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.115926 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:59Z","lastTransitionTime":"2025-12-06T15:32:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.218692 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.218769 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.218790 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.218838 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.218852 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:59Z","lastTransitionTime":"2025-12-06T15:32:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.322087 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.322171 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.322195 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.322227 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.322249 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:59Z","lastTransitionTime":"2025-12-06T15:32:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.424816 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.424865 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.424875 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.424893 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.424904 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:59Z","lastTransitionTime":"2025-12-06T15:32:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.528128 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.528184 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.528197 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.528214 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.528227 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:59Z","lastTransitionTime":"2025-12-06T15:32:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.631680 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.631754 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.631779 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.631813 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.631840 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:59Z","lastTransitionTime":"2025-12-06T15:32:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.711882 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 06 15:32:59 crc kubenswrapper[5003]: E1206 15:32:59.712108 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.712171 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.712120 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 06 15:32:59 crc kubenswrapper[5003]: E1206 15:32:59.712367 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 06 15:32:59 crc kubenswrapper[5003]: E1206 15:32:59.712570 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.734937 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.734992 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.735012 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.735035 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.735054 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:59Z","lastTransitionTime":"2025-12-06T15:32:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.838313 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.838368 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.838392 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.838413 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.838427 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:59Z","lastTransitionTime":"2025-12-06T15:32:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.941720 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.941786 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.941805 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.941844 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:32:59 crc kubenswrapper[5003]: I1206 15:32:59.941899 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:32:59Z","lastTransitionTime":"2025-12-06T15:32:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.045295 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.045388 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.045411 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.045441 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.045468 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:00Z","lastTransitionTime":"2025-12-06T15:33:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.148310 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.148373 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.148396 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.148425 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.148446 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:00Z","lastTransitionTime":"2025-12-06T15:33:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.251230 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.251297 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.251315 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.251337 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.251354 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:00Z","lastTransitionTime":"2025-12-06T15:33:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.353425 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.353667 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.353689 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.353707 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.353718 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:00Z","lastTransitionTime":"2025-12-06T15:33:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.457156 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.457221 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.457246 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.457274 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.457291 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:00Z","lastTransitionTime":"2025-12-06T15:33:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.559770 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.559811 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.559823 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.559839 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.559851 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:00Z","lastTransitionTime":"2025-12-06T15:33:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.662217 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.662253 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.662263 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.662277 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.662288 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:00Z","lastTransitionTime":"2025-12-06T15:33:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.711748 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9"
Dec 06 15:33:00 crc kubenswrapper[5003]: E1206 15:33:00.711866 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.765225 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.765264 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.765305 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.765327 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.765336 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:00Z","lastTransitionTime":"2025-12-06T15:33:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.868217 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.868269 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.868281 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.868302 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.868314 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:00Z","lastTransitionTime":"2025-12-06T15:33:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.970949 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.970988 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.970997 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.971012 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:00 crc kubenswrapper[5003]: I1206 15:33:00.971021 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:00Z","lastTransitionTime":"2025-12-06T15:33:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.073982 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.074033 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.074043 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.074057 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.074067 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:01Z","lastTransitionTime":"2025-12-06T15:33:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.178519 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.178558 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.178570 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.178590 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.178604 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:01Z","lastTransitionTime":"2025-12-06T15:33:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.280580 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.280619 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.280632 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.280648 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.280661 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:01Z","lastTransitionTime":"2025-12-06T15:33:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.383234 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.383268 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.383277 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.383290 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.383299 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:01Z","lastTransitionTime":"2025-12-06T15:33:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.485848 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.485905 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.485919 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.485939 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.485955 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:01Z","lastTransitionTime":"2025-12-06T15:33:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.588156 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.588203 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.588215 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.588231 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.588245 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:01Z","lastTransitionTime":"2025-12-06T15:33:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.691138 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.691688 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.691753 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.691781 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.692123 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:01Z","lastTransitionTime":"2025-12-06T15:33:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.711795 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.711853 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 06 15:33:01 crc kubenswrapper[5003]: E1206 15:33:01.711954 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.712027 5003 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:33:01 crc kubenswrapper[5003]: E1206 15:33:01.712136 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:33:01 crc kubenswrapper[5003]: E1206 15:33:01.712331 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.732098 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/we
bhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:01Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.749298 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dc41f9e-e763-4a9a-a064-f65bc24332b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://467dcc8ed3936c6fb24f6c6c3e42eccf3597ce6920d12b253946a123ee22faf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfc
f922ec7f38f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"image\\\":\\\"quay.io/opens
hift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\
\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-j4rf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:01Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.762400 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha25
6:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:01Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.772634 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:01Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.783295 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:01Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.795409 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:01Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.795545 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.795585 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.795595 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.795609 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.795618 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:01Z","lastTransitionTime":"2025-12-06T15:33:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.805207 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:01Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.826299 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a695d94-271c-45bc-8a89-dfdecb57ec00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f0ad17b1a040807cb7cca5ee53ef54da1870f63cadccb3aa20421b08016cf10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c6884100d6c3b4944019e7d9e7a2bdaf2013d697ce3980863742a60a7b52d7f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-06T15:32:39Z\\\",\\\"message\\\":\\\"kipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-marketplace/redhat-marketplace_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/redhat-marketplace\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.140\\\\\\\", Port:50051, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1206 15:32:39.139098 6445 ovnkube.go:599] Stopped ovnkube\\\\nI1206 15:32:39.139178 6445 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1206 15:32:39.139236 6445 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to s\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f0ad17b1a040807cb7cca5ee53ef54da1870f63cadccb3aa20421b08016cf10\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-06T15:32:53Z\\\",\\\"message\\\":\\\"d44-bbd8-dba87b7dbaf0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1206 15:32:53.311445 6646 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-j4rf7 in node crc\\\\nI1206 15:32:53.311457 6646 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-j4rf7 after 0 failed attempt(s)\\\\nI1206 15:32:53.311473 6646 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-j4rf7\\\\nF1206 15:32:53.311504 6646 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could 
not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:53Z is after 2025-08-24\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"r
un-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7xwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:01Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.838524 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-jmzd9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fa121e1-7f2f-4912-945f-86cb199c3014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9s4lw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9s4lw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:41Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-jmzd9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:01Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.852055 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:01Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.863400 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qcqkl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ae5ee47d26422cffca5f12a4ee1455dcd34c148b1b490d69b88280b4497da11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbhrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qcqkl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-06T15:33:01Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.876881 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9kdpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46c8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9kdpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:01Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.893626 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7788j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bf147399c10dd7e654abd5213c4d90e8aa9feca7f8c6032c16576a25aeace68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twj7c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47c00cc6d341dbe1a5f3495f04fe4e695370952f6d6b209a5aaff1ace9d17e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twj7c\\\",\\\"readOnly\\\":true,\\\
"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7788j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:01Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.899985 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.900017 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.900028 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.900045 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.900057 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:01Z","lastTransitionTime":"2025-12-06T15:33:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.905481 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25cfd72-ae1c-45c6-bf50-3f3cd455c1ad\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15818f84cfa472a42a18bafe5ff4a71da326b2f5871f47e693d5e1a1b3c8b986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c1591171f15bdf52339cb914e52de4dad9c34f1a6b6bb882f15bb41308a5b7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94df3e8c0295aedc3bf7b97296d443b5240d17fcd83f8e8cf1bc9730740d7f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bd16db35730810799b00301bb8a68f91d69dc93dac04638d5c187bdd374393d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3bd16db35730810799b00301bb8a68f91d69dc93dac04638d5c187bdd374393d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:01Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.917099 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:01Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.926695 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cac97fc8a54c2fbd81b0abade11a987e5226084d8a4f75ddbbaad1f6c4cb9783\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:01Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:01 crc kubenswrapper[5003]: I1206 15:33:01.936197 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a047c4d-003e-4668-9b96-945eab34ab68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b3a6db6c2c8e6a283f7b46ef28ba7310d3cfc7054cfd8cbcc290ebd0d26dcf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-w25db\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:01Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.002476 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.002545 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.002557 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.002572 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.002584 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:02Z","lastTransitionTime":"2025-12-06T15:33:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.105088 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.105135 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.105146 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.105163 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.105175 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:02Z","lastTransitionTime":"2025-12-06T15:33:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.207028 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.207068 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.207077 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.207091 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.207101 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:02Z","lastTransitionTime":"2025-12-06T15:33:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.310457 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.310652 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.310683 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.310729 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.310756 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:02Z","lastTransitionTime":"2025-12-06T15:33:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.413699 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.413748 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.413763 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.413785 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.413803 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:02Z","lastTransitionTime":"2025-12-06T15:33:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.516712 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.516758 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.516777 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.516799 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.516815 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:02Z","lastTransitionTime":"2025-12-06T15:33:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.618807 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.618840 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.618849 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.618862 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.618871 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:02Z","lastTransitionTime":"2025-12-06T15:33:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.712081 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:33:02 crc kubenswrapper[5003]: E1206 15:33:02.712239 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.720557 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.720605 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.720619 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.720636 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.720646 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:02Z","lastTransitionTime":"2025-12-06T15:33:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.823439 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.823521 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.823538 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.823559 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.823574 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:02Z","lastTransitionTime":"2025-12-06T15:33:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.926323 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.926353 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.926360 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.926373 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:02 crc kubenswrapper[5003]: I1206 15:33:02.926381 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:02Z","lastTransitionTime":"2025-12-06T15:33:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.028879 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.029514 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.029607 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.029695 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.029788 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:03Z","lastTransitionTime":"2025-12-06T15:33:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.132586 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.132628 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.132639 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.132655 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.132669 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:03Z","lastTransitionTime":"2025-12-06T15:33:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.235181 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.235209 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.235217 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.235229 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.235237 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:03Z","lastTransitionTime":"2025-12-06T15:33:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.342055 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.342134 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.342173 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.342203 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.342225 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:03Z","lastTransitionTime":"2025-12-06T15:33:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.444520 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.444555 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.444564 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.444582 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.444594 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:03Z","lastTransitionTime":"2025-12-06T15:33:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.546636 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.546681 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.546690 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.546705 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.546714 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:03Z","lastTransitionTime":"2025-12-06T15:33:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.652025 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.652060 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.652079 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.652115 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.652130 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:03Z","lastTransitionTime":"2025-12-06T15:33:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.711694 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.711718 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:33:03 crc kubenswrapper[5003]: E1206 15:33:03.711821 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.711836 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:33:03 crc kubenswrapper[5003]: E1206 15:33:03.711897 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:33:03 crc kubenswrapper[5003]: E1206 15:33:03.711965 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.753852 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.753887 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.753898 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.753912 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.753922 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:03Z","lastTransitionTime":"2025-12-06T15:33:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.855619 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.855660 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.855673 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.855688 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.855699 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:03Z","lastTransitionTime":"2025-12-06T15:33:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.958413 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.958523 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.958552 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.958583 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:03 crc kubenswrapper[5003]: I1206 15:33:03.958637 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:03Z","lastTransitionTime":"2025-12-06T15:33:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.061764 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.061857 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.061876 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.062427 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.062471 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:04Z","lastTransitionTime":"2025-12-06T15:33:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.165818 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.166260 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.166624 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.166823 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.166964 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:04Z","lastTransitionTime":"2025-12-06T15:33:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.269839 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.269898 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.269917 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.269939 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.269953 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:04Z","lastTransitionTime":"2025-12-06T15:33:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.373236 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.373277 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.373289 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.373306 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.373347 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:04Z","lastTransitionTime":"2025-12-06T15:33:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.476006 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.476041 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.476051 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.476067 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.476079 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:04Z","lastTransitionTime":"2025-12-06T15:33:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.578652 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.578698 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.578710 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.578729 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.578742 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:04Z","lastTransitionTime":"2025-12-06T15:33:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.681471 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.681809 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.681829 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.682012 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.682041 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:04Z","lastTransitionTime":"2025-12-06T15:33:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.712029 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:33:04 crc kubenswrapper[5003]: E1206 15:33:04.712555 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.784603 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.785004 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.785078 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.785150 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.785225 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:04Z","lastTransitionTime":"2025-12-06T15:33:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.888898 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.888952 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.888962 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.888982 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.888994 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:04Z","lastTransitionTime":"2025-12-06T15:33:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.991296 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.991353 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.991365 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.991385 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:04 crc kubenswrapper[5003]: I1206 15:33:04.991399 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:04Z","lastTransitionTime":"2025-12-06T15:33:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.094348 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.094402 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.094412 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.094435 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.094450 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:05Z","lastTransitionTime":"2025-12-06T15:33:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.196291 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.196350 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.196364 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.196393 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.196407 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:05Z","lastTransitionTime":"2025-12-06T15:33:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.299745 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.299810 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.299824 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.299844 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.299856 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:05Z","lastTransitionTime":"2025-12-06T15:33:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.403142 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.403192 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.403203 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.403221 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.403233 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:05Z","lastTransitionTime":"2025-12-06T15:33:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.506219 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.506313 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.506330 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.506348 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.506358 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:05Z","lastTransitionTime":"2025-12-06T15:33:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.609173 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.609230 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.609242 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.609264 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.609276 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:05Z","lastTransitionTime":"2025-12-06T15:33:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.711385 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 06 15:33:05 crc kubenswrapper[5003]: E1206 15:33:05.711481 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.711573 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.711589 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 06 15:33:05 crc kubenswrapper[5003]: E1206 15:33:05.711614 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.711699 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.711730 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.711740 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.711755 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:05 crc kubenswrapper[5003]: E1206 15:33:05.711764 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.711764 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:05Z","lastTransitionTime":"2025-12-06T15:33:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.813986 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.814091 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.814105 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.814130 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.814142 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:05Z","lastTransitionTime":"2025-12-06T15:33:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.838067 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.838126 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.838141 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.838161 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.838173 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:05Z","lastTransitionTime":"2025-12-06T15:33:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:05 crc kubenswrapper[5003]: E1206 15:33:05.851749 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"010ec561-c8bb-454b-ab74-658add58caba\\\",\\\"systemUUID\\\":\\\"42b9e6db-6fd4-4e84-a5b7-176f28bfe2f1\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:05Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.856315 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.856380 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.856446 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.856472 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.856512 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:05Z","lastTransitionTime":"2025-12-06T15:33:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:05 crc kubenswrapper[5003]: E1206 15:33:05.869947 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"010ec561-c8bb-454b-ab74-658add58caba\\\",\\\"systemUUID\\\":\\\"42b9e6db-6fd4-4e84-a5b7-176f28bfe2f1\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:05Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.874271 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.874321 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
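The "Error updating node status, will retry" failure at 15:33:05.851749 above repeats unchanged through four further retries in this window (.869947, .886466, .902175, .919622); every attempt is rejected for the same root cause named at the end of the entry: the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 serves an x509 certificate that expired on 2025-08-24T17:21:41Z, well before the node's current clock of 2025-12-06T15:33:05Z. A minimal, stdlib-only Go sketch of how one might confirm the expiry from the node (illustrative, not part of the log or of kubelet; it assumes the webhook is still listening on that port):

// certcheck.go: dial the webhook endpoint, skip chain verification so the
// handshake succeeds even with an expired certificate, and print the leaf
// certificate's validity window against the current time.
package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// Address taken from the failing webhook URL in the log entry above.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		InsecureSkipVerify: true, // we only inspect the cert, we never trust it
	})
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()

	state := conn.ConnectionState()
	if len(state.PeerCertificates) == 0 {
		fmt.Println("no peer certificate presented")
		return
	}
	leaf := state.PeerCertificates[0]
	now := time.Now().UTC()
	fmt.Printf("subject:   %s\n", leaf.Subject)
	fmt.Printf("notBefore: %s\n", leaf.NotBefore.UTC().Format(time.RFC3339))
	fmt.Printf("notAfter:  %s\n", leaf.NotAfter.UTC().Format(time.RFC3339))
	fmt.Printf("expired:   %v (now %s)\n", now.After(leaf.NotAfter), now.Format(time.RFC3339))
}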
event="NodeHasNoDiskPressure" Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.874332 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.874349 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.874359 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:05Z","lastTransitionTime":"2025-12-06T15:33:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:05 crc kubenswrapper[5003]: E1206 15:33:05.886466 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"010ec561-c8bb-454b-ab74-658add58caba\\\",\\\"systemUUID\\\":\\\"42b9e6db-6fd4-4e84-a5b7-176f28bfe2f1\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:05Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.889738 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.889792 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.889804 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.889827 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.889841 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:05Z","lastTransitionTime":"2025-12-06T15:33:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:05 crc kubenswrapper[5003]: E1206 15:33:05.902175 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"010ec561-c8bb-454b-ab74-658add58caba\\\",\\\"systemUUID\\\":\\\"42b9e6db-6fd4-4e84-a5b7-176f28bfe2f1\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:05Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.905662 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.905703 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
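Each "Node became not ready" entry above logs the same Ready condition that the failing patch keeps trying to write. A stdlib-only sketch that decodes the condition JSON exactly as it appears in the setters.go lines (the struct below is a hand-written mirror of the fields of k8s.io/api/core/v1.NodeCondition, not the imported type; timestamps are kept as strings to avoid pulling in metav1.Time):

// condition.go: parse the logged NodeCondition JSON.
package main

import (
	"encoding/json"
	"fmt"
)

// NodeCondition mirrors only the fields present in the log line.
type NodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	// Condition JSON copied verbatim from a "Node became not ready" entry above.
	raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:05Z","lastTransitionTime":"2025-12-06T15:33:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}`

	var c NodeCondition
	if err := json.Unmarshal([]byte(raw), &c); err != nil {
		fmt.Println("unmarshal failed:", err)
		return
	}
	fmt.Printf("%s=%s (reason %s)\n", c.Type, c.Status, c.Reason)
}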
event="NodeHasNoDiskPressure" Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.905734 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.905756 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.905771 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:05Z","lastTransitionTime":"2025-12-06T15:33:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:05 crc kubenswrapper[5003]: E1206 15:33:05.919622 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"010ec561-c8bb-454b-ab74-658add58caba\\\",\\\"systemUUID\\\":\\\"42b9e6db-6fd4-4e84-a5b7-176f28bfe2f1\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:05Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:05 crc kubenswrapper[5003]: E1206 15:33:05.919782 5003 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.921830 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.921865 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.921878 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.921898 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:05 crc kubenswrapper[5003]: I1206 15:33:05.921912 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:05Z","lastTransitionTime":"2025-12-06T15:33:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.023959 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.023998 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.024007 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.024022 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.024032 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:06Z","lastTransitionTime":"2025-12-06T15:33:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.126456 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.126522 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.126534 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.126549 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.126562 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:06Z","lastTransitionTime":"2025-12-06T15:33:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.229396 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.229433 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.229447 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.229466 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.229476 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:06Z","lastTransitionTime":"2025-12-06T15:33:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.332298 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.332342 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.332350 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.332370 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.332379 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:06Z","lastTransitionTime":"2025-12-06T15:33:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.434982 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.435049 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.435063 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.435107 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.435122 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:06Z","lastTransitionTime":"2025-12-06T15:33:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.537271 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.537329 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.537341 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.537356 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.537367 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:06Z","lastTransitionTime":"2025-12-06T15:33:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.640122 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.640169 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.640181 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.640200 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.640213 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:06Z","lastTransitionTime":"2025-12-06T15:33:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.711292 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:33:06 crc kubenswrapper[5003]: E1206 15:33:06.711429 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.742308 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.742368 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.742382 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.742405 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.742417 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:06Z","lastTransitionTime":"2025-12-06T15:33:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.845059 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.845099 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.845110 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.845124 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.845136 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:06Z","lastTransitionTime":"2025-12-06T15:33:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.947010 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.947073 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.947087 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.947103 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:06 crc kubenswrapper[5003]: I1206 15:33:06.947114 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:06Z","lastTransitionTime":"2025-12-06T15:33:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.049437 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.049506 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.049525 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.049548 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.049562 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:07Z","lastTransitionTime":"2025-12-06T15:33:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.151644 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.151699 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.151709 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.151723 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.151749 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:07Z","lastTransitionTime":"2025-12-06T15:33:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.254112 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.254177 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.254191 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.254218 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.254237 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:07Z","lastTransitionTime":"2025-12-06T15:33:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.357288 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.357335 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.357343 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.357359 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.357370 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:07Z","lastTransitionTime":"2025-12-06T15:33:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.460310 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.460362 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.460374 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.460390 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.460402 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:07Z","lastTransitionTime":"2025-12-06T15:33:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.563337 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.563397 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.563415 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.563439 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.563458 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:07Z","lastTransitionTime":"2025-12-06T15:33:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.666183 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.666267 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.666280 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.666295 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.666306 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:07Z","lastTransitionTime":"2025-12-06T15:33:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.712210 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.712303 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:33:07 crc kubenswrapper[5003]: E1206 15:33:07.712348 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.712424 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:33:07 crc kubenswrapper[5003]: E1206 15:33:07.712923 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:33:07 crc kubenswrapper[5003]: E1206 15:33:07.713008 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.713103 5003 scope.go:117] "RemoveContainer" containerID="2f0ad17b1a040807cb7cca5ee53ef54da1870f63cadccb3aa20421b08016cf10" Dec 06 15:33:07 crc kubenswrapper[5003]: E1206 15:33:07.713244 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-p7xwd_openshift-ovn-kubernetes(8a695d94-271c-45bc-8a89-dfdecb57ec00)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.726249 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:07Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.738409 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:07Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.755386 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a695d94-271c-45bc-8a89-dfdecb57ec00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f0ad17b1a040807cb7cca5ee53ef54da1870f63cadccb3aa20421b08016cf10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f0ad17b1a040807cb7cca5ee53ef54da1870f63cadccb3aa20421b08016cf10\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-06T15:32:53Z\\\",\\\"message\\\":\\\"d44-bbd8-dba87b7dbaf0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1206 15:32:53.311445 6646 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-j4rf7 in node crc\\\\nI1206 15:32:53.311457 6646 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-j4rf7 after 0 failed attempt(s)\\\\nI1206 15:32:53.311473 6646 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-j4rf7\\\\nF1206 15:32:53.311504 6646 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:53Z is after 2025-08-24\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-p7xwd_openshift-ovn-kubernetes(8a695d94-271c-45bc-8a89-dfdecb57ec00)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7xwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:07Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.767107 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-jmzd9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fa121e1-7f2f-4912-945f-86cb199c3014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9s4lw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9s4lw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:41Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-jmzd9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:07Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.768788 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.768822 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.768831 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.768845 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.768865 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:07Z","lastTransitionTime":"2025-12-06T15:33:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.779610 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:07Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.789549 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qcqkl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ae5ee47d26422cffca5f12a4ee1455dcd34c148b1b490d69b88280b4497da11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbhrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qcqkl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:07Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.801738 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9kdpn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46c8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9kdpn\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:07Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.812998 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7788j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bf147399c10dd7e654abd5213c4d90e8aa9feca7f8c6032c16576a25aeace68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twj7c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47c00cc6d341dbe1a5f3495f04fe4e695370952f6d6b209a5aaff1ace9d17e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twj7c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7788j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:07Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.821479 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25cfd72-ae1c-45c6-bf50-3f3cd455c1ad\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15818f84cfa472a42a18bafe5ff4a71da326b2f5871f47e693d5e1a1b3c8b986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c1591171f15bdf52339cb914e52de4dad9c34f1a6b6bb882f15bb41308a5b7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94df3e8c0295aedc3bf7b97296d443b5240d17fcd83f8e8cf1bc9730740d7f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f1
1f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bd16db35730810799b00301bb8a68f91d69dc93dac04638d5c187bdd374393d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3bd16db35730810799b00301bb8a68f91d69dc93dac04638d5c187bdd374393d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:07Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.830846 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:07Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.838355 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cac97fc8a54c2fbd81b0abade11a987e5226084d8a4f75ddbbaad1f6c4cb9783\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:07Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.849778 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a047c4d-003e-4668-9b96-945eab34ab68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b3a6db6c2c8e6a283f7b46ef28ba7310d3cfc7054cfd8cbcc290ebd0d26dcf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-w25db\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:07Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.859097 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:07Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.870108 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dc41f9e-e763-4a9a-a064-f65bc24332b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://467dcc8ed3936c6fb24f6c6c3e42eccf3597ce6920d12b253946a123ee22faf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"c
ontainerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:32Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-j4rf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:07Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:07 crc kubenswrapper[5003]: 
I1206 15:33:07.872030 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.872066 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.872077 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.872094 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.872106 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:07Z","lastTransitionTime":"2025-12-06T15:33:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.883196 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"
name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] 
Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:07Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.896303 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:07Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.910749 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:07Z is after 2025-08-24T17:21:41Z"
Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.974439 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.974473 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.974483 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.974537 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:07 crc kubenswrapper[5003]: I1206 15:33:07.974548 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:07Z","lastTransitionTime":"2025-12-06T15:33:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:08 crc kubenswrapper[5003]: I1206 15:33:08.711597 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9"
Dec 06 15:33:08 crc kubenswrapper[5003]: E1206 15:33:08.711761 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014"
Dec 06 15:33:09 crc kubenswrapper[5003]: I1206 15:33:09.711895 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 06 15:33:09 crc kubenswrapper[5003]: I1206 15:33:09.711926 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 06 15:33:09 crc kubenswrapper[5003]: E1206 15:33:09.712042 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 06 15:33:09 crc kubenswrapper[5003]: I1206 15:33:09.712118 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 06 15:33:09 crc kubenswrapper[5003]: E1206 15:33:09.712240 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 06 15:33:09 crc kubenswrapper[5003]: E1206 15:33:09.712365 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 06 15:33:10 crc kubenswrapper[5003]: I1206 15:33:10.711623 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9"
Dec 06 15:33:10 crc kubenswrapper[5003]: E1206 15:33:10.711870 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014"
Dec 06 15:33:11 crc kubenswrapper[5003]: I1206 15:33:11.711333 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 06 15:33:11 crc kubenswrapper[5003]: I1206 15:33:11.711372 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 06 15:33:11 crc kubenswrapper[5003]: I1206 15:33:11.711423 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 06 15:33:11 crc kubenswrapper[5003]: E1206 15:33:11.711536 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 06 15:33:11 crc kubenswrapper[5003]: E1206 15:33:11.711636 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 06 15:33:11 crc kubenswrapper[5003]: E1206 15:33:11.711726 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:33:11 crc kubenswrapper[5003]: I1206 15:33:11.714067 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:11 crc kubenswrapper[5003]: I1206 15:33:11.714111 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:11 crc kubenswrapper[5003]: I1206 15:33:11.714130 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:11 crc kubenswrapper[5003]: I1206 15:33:11.714153 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:11 crc kubenswrapper[5003]: I1206 15:33:11.714217 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:11Z","lastTransitionTime":"2025-12-06T15:33:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:11 crc kubenswrapper[5003]: I1206 15:33:11.728338 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-06T15:33:11Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:11 crc kubenswrapper[5003]: I1206 15:33:11.749349 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a695d94-271c-45bc-8a89-dfdecb57ec00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\
"containerID\\\":\\\"cri-o://5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f0ad17b1a040807cb7cca5ee53ef54da1870f63cadccb3aa20421b08016cf10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f0ad17b1a040807cb7cca5ee53ef54da1870f63cadccb3aa20421b08016cf10\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-06T15:32:53Z\\\",\\\"message\\\":\\\"d44-bbd8-dba87b7dbaf0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1206 15:32:53.311445 6646 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-j4rf7 in node crc\\\\nI1206 15:32:53.311457 6646 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-j4rf7 after 0 failed attempt(s)\\\\nI1206 15:32:53.311473 6646 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-j4rf7\\\\nF1206 15:32:53.311504 6646 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:53Z is after 2025-08-24\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed 
container=ovnkube-controller pod=ovnkube-node-p7xwd_openshift-ovn-kubernetes(8a695d94-271c-45bc-8a89-dfdecb57ec00)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7xwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:11Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:11 crc kubenswrapper[5003]: I1206 15:33:11.765900 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:11Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:11 crc kubenswrapper[5003]: I1206 15:33:11.781190 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:11Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:11 crc kubenswrapper[5003]: I1206 15:33:11.793157 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qcqkl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ae5ee47d26422cffca5f12a4ee1455dcd34c148b1b490d69b88280b4497da11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbhrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qcqkl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-06T15:33:11Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:11 crc kubenswrapper[5003]: I1206 15:33:11.807279 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9kdpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46c8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9kdpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:11Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:11 crc kubenswrapper[5003]: I1206 15:33:11.816236 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:11 crc kubenswrapper[5003]: I1206 15:33:11.816261 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:11 crc kubenswrapper[5003]: I1206 15:33:11.816269 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:11 crc kubenswrapper[5003]: I1206 15:33:11.816282 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:11 crc kubenswrapper[5003]: I1206 15:33:11.816293 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:11Z","lastTransitionTime":"2025-12-06T15:33:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:11 crc kubenswrapper[5003]: I1206 15:33:11.822530 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-jmzd9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fa121e1-7f2f-4912-945f-86cb199c3014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9s4lw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9s4lw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:41Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-jmzd9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:11Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:11 crc kubenswrapper[5003]: I1206 15:33:11.838116 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:11Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:11 crc kubenswrapper[5003]: I1206 15:33:11.850888 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cac97fc8a54c2fbd81b0abade11a987e5226084d8a4f75ddbbaad1f6c4cb9783\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:11Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:11 crc kubenswrapper[5003]: I1206 15:33:11.861726 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a047c4d-003e-4668-9b96-945eab34ab68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b3a6db6c2c8e6a283f7b46ef28ba7310d3cfc7054cfd8cbcc290ebd0d26dcf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-w25db\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:11Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:11 crc kubenswrapper[5003]: I1206 15:33:11.871808 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7788j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bf147399c10dd7e654abd5213c4d90e8aa9feca7f8c6032c16576a25aeace68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twj7c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47c00cc6d341dbe1a5f3495f04fe4e695370952f6d6b209a5aaff1ace9d17e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twj7c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7788j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:11Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:11 crc kubenswrapper[5003]: I1206 15:33:11.887900 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25cfd72-ae1c-45c6-bf50-3f3cd455c1ad\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15818f84cfa472a42a18bafe5ff4a71da326b2f5871f47e693d5e1a1b3c8b986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c1591171f15bdf52339cb914e52de4dad9c34f1a6b6bb882f15bb41308a5b7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94df3e8c0295aedc3bf7b97296d443b5240d17fcd83f8e8cf1bc9730740d7f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\
",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bd16db35730810799b00301bb8a68f91d69dc93dac04638d5c187bdd374393d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3bd16db35730810799b00301bb8a68f91d69dc93dac04638d5c187bdd374393d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:11Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:11 crc kubenswrapper[5003]: I1206 15:33:11.905999 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:11Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:11 crc kubenswrapper[5003]: I1206 15:33:11.920337 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:11 crc kubenswrapper[5003]: I1206 15:33:11.920386 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:11 crc kubenswrapper[5003]: I1206 15:33:11.920395 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:11 crc kubenswrapper[5003]: I1206 15:33:11.920410 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:11 crc kubenswrapper[5003]: I1206 15:33:11.920419 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:11Z","lastTransitionTime":"2025-12-06T15:33:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:11 crc kubenswrapper[5003]: I1206 15:33:11.920979 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:11Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:11 crc kubenswrapper[5003]: I1206 15:33:11.933768 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:11Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:11 crc kubenswrapper[5003]: I1206 15:33:11.946140 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dc41f9e-e763-4a9a-a064-f65bc24332b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://467dcc8ed3936c6fb24f6c6c3e42eccf3597ce6920d12b253946a123ee22faf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-j4rf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:11Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:11 crc kubenswrapper[5003]: I1206 15:33:11.958754 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:11Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.022709 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.022806 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.022830 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.022856 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.022876 5003 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:12Z","lastTransitionTime":"2025-12-06T15:33:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.124653 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.124731 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.124755 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.124787 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.124813 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:12Z","lastTransitionTime":"2025-12-06T15:33:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.226948 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.226993 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.227004 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.227022 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.227035 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:12Z","lastTransitionTime":"2025-12-06T15:33:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.329152 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.329202 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.329212 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.329229 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.329240 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:12Z","lastTransitionTime":"2025-12-06T15:33:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.432359 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.432403 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.432414 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.432429 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.432444 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:12Z","lastTransitionTime":"2025-12-06T15:33:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.534562 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.534623 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.534640 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.534664 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.534683 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:12Z","lastTransitionTime":"2025-12-06T15:33:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.637072 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.637122 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.637139 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.637161 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.637178 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:12Z","lastTransitionTime":"2025-12-06T15:33:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.712001 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:33:12 crc kubenswrapper[5003]: E1206 15:33:12.712146 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.739087 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.739129 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.739140 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.739156 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.739168 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:12Z","lastTransitionTime":"2025-12-06T15:33:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.842347 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.842396 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.842408 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.842423 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.842434 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:12Z","lastTransitionTime":"2025-12-06T15:33:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.944885 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.944930 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.944940 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.944955 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:12 crc kubenswrapper[5003]: I1206 15:33:12.944968 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:12Z","lastTransitionTime":"2025-12-06T15:33:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.047945 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.048001 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.048010 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.048025 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.048034 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:13Z","lastTransitionTime":"2025-12-06T15:33:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.067594 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9fa121e1-7f2f-4912-945f-86cb199c3014-metrics-certs\") pod \"network-metrics-daemon-jmzd9\" (UID: \"9fa121e1-7f2f-4912-945f-86cb199c3014\") " pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:33:13 crc kubenswrapper[5003]: E1206 15:33:13.067720 5003 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 06 15:33:13 crc kubenswrapper[5003]: E1206 15:33:13.067776 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9fa121e1-7f2f-4912-945f-86cb199c3014-metrics-certs podName:9fa121e1-7f2f-4912-945f-86cb199c3014 nodeName:}" failed. No retries permitted until 2025-12-06 15:33:45.067761364 +0000 UTC m=+103.601115745 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9fa121e1-7f2f-4912-945f-86cb199c3014-metrics-certs") pod "network-metrics-daemon-jmzd9" (UID: "9fa121e1-7f2f-4912-945f-86cb199c3014") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.150710 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.150775 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.150804 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.150830 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.150851 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:13Z","lastTransitionTime":"2025-12-06T15:33:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.252723 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.252769 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.252781 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.252798 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.252809 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:13Z","lastTransitionTime":"2025-12-06T15:33:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.355372 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.355425 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.355440 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.355461 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.355474 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:13Z","lastTransitionTime":"2025-12-06T15:33:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.457821 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.457873 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.457886 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.457904 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.457915 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:13Z","lastTransitionTime":"2025-12-06T15:33:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.559883 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.559919 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.559927 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.559942 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.559952 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:13Z","lastTransitionTime":"2025-12-06T15:33:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.662385 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.662429 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.662441 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.662456 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.662468 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:13Z","lastTransitionTime":"2025-12-06T15:33:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.712307 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:33:13 crc kubenswrapper[5003]: E1206 15:33:13.712448 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.712688 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.712737 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:33:13 crc kubenswrapper[5003]: E1206 15:33:13.712765 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:33:13 crc kubenswrapper[5003]: E1206 15:33:13.712967 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.777851 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.777895 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.777905 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.777919 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.777928 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:13Z","lastTransitionTime":"2025-12-06T15:33:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.880910 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.880966 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.880982 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.881004 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.881021 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:13Z","lastTransitionTime":"2025-12-06T15:33:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.982903 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.982959 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.982968 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.982979 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:13 crc kubenswrapper[5003]: I1206 15:33:13.982989 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:13Z","lastTransitionTime":"2025-12-06T15:33:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.084965 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.085011 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.085022 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.085040 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.085052 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:14Z","lastTransitionTime":"2025-12-06T15:33:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.187361 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.187403 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.187414 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.187429 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.187441 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:14Z","lastTransitionTime":"2025-12-06T15:33:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.290482 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.290560 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.290574 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.290613 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.290631 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:14Z","lastTransitionTime":"2025-12-06T15:33:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.393371 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.393434 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.393447 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.393470 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.393505 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:14Z","lastTransitionTime":"2025-12-06T15:33:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.496645 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.496717 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.496728 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.496746 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.496757 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:14Z","lastTransitionTime":"2025-12-06T15:33:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.599605 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.599666 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.599684 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.599707 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.599726 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:14Z","lastTransitionTime":"2025-12-06T15:33:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.702547 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.702580 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.702588 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.702602 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.702611 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:14Z","lastTransitionTime":"2025-12-06T15:33:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.711843 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:33:14 crc kubenswrapper[5003]: E1206 15:33:14.712018 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.805186 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.805261 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.805280 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.805306 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.805323 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:14Z","lastTransitionTime":"2025-12-06T15:33:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.907978 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.908017 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.908030 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.908045 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:14 crc kubenswrapper[5003]: I1206 15:33:14.908056 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:14Z","lastTransitionTime":"2025-12-06T15:33:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.010577 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.010648 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.010660 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.010678 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.010690 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:15Z","lastTransitionTime":"2025-12-06T15:33:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.114272 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.114332 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.114351 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.114375 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.114391 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:15Z","lastTransitionTime":"2025-12-06T15:33:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.216529 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.216584 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.216597 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.216612 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.216623 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:15Z","lastTransitionTime":"2025-12-06T15:33:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.318751 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.318793 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.318807 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.318823 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.318832 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:15Z","lastTransitionTime":"2025-12-06T15:33:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.421227 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.421290 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.421305 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.421328 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.421344 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:15Z","lastTransitionTime":"2025-12-06T15:33:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.536456 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.536604 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.536624 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.536677 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.536695 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:15Z","lastTransitionTime":"2025-12-06T15:33:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.639321 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.639363 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.639372 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.639389 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.639404 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:15Z","lastTransitionTime":"2025-12-06T15:33:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.711725 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.711852 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:33:15 crc kubenswrapper[5003]: E1206 15:33:15.711857 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.711974 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:33:15 crc kubenswrapper[5003]: E1206 15:33:15.712026 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:33:15 crc kubenswrapper[5003]: E1206 15:33:15.712129 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.741723 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.741782 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.741792 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.741807 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.741817 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:15Z","lastTransitionTime":"2025-12-06T15:33:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.844827 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.844925 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.844939 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.844959 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.844972 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:15Z","lastTransitionTime":"2025-12-06T15:33:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.947759 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.947819 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.947842 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.947871 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:15 crc kubenswrapper[5003]: I1206 15:33:15.947893 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:15Z","lastTransitionTime":"2025-12-06T15:33:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.050917 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.050960 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.050998 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.051019 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.051031 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:16Z","lastTransitionTime":"2025-12-06T15:33:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.125758 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.125851 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.125870 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.125940 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.125968 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:16Z","lastTransitionTime":"2025-12-06T15:33:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.140870 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-9kdpn_350e8b9a-b7bf-4dc9-abe9-d10f7a088be3/kube-multus/0.log" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.140939 5003 generic.go:334] "Generic (PLEG): container finished" podID="350e8b9a-b7bf-4dc9-abe9-d10f7a088be3" containerID="e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661" exitCode=1 Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.140978 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-9kdpn" event={"ID":"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3","Type":"ContainerDied","Data":"e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661"} Dec 06 15:33:16 crc kubenswrapper[5003]: E1206 15:33:16.141355 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"010ec561-c8bb-454b-ab74-658add58caba\\\",\\\"systemUUID\\\":\\\"42b9e6db-6fd4-4e84-a5b7-176f28bfe2f1\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:16Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.141599 5003 scope.go:117] "RemoveContainer" containerID="e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.154007 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.154072 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.154091 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.154117 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.154143 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:16Z","lastTransitionTime":"2025-12-06T15:33:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.172955 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:16Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:16 crc kubenswrapper[5003]: E1206 15:33:16.174286 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"010ec561-c8bb-454b-ab74-658add58caba\\\",\\\"systemUUID\\\":\\\"42b9e6db-6fd4-4e84-a5b7-176f28bfe2f1\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:16Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.179579 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.179629 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.179645 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.179666 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.179681 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:16Z","lastTransitionTime":"2025-12-06T15:33:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.191914 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qcqkl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ae5ee47d26422cffca5f12a4ee1455dcd34c148b1b490d69b88280b4497da11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbhrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qcqkl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:16Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:16 crc 
kubenswrapper[5003]: E1206 15:33:16.194419 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider 
started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshif
t-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d
34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"010ec561-c8bb-454b-ab74-658add58caba\\\",\\\"systemUUID\\\":\\\"42b9e6db-6fd4-4e84-a5b7-176f28bfe2f1\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:16Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.198730 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.198770 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:16 
crc kubenswrapper[5003]: I1206 15:33:16.198780 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.198794 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.198803 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:16Z","lastTransitionTime":"2025-12-06T15:33:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.212200 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9kdpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-06T15:33:15Z\\\",\\\"message\\\":\\\"2025-12-06T15:32:30+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_572b1774-f930-470b-aa25-26a311d76ba2\\\\n2025-12-06T15:32:30+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_572b1774-f930-470b-aa25-26a311d76ba2 to /host/opt/cni/bin/\\\\n2025-12-06T15:32:30Z [verbose] multus-daemon started\\\\n2025-12-06T15:32:30Z [verbose] Readiness Indicator file check\\\\n2025-12-06T15:33:15Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46c8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9kdpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:16Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:16 crc kubenswrapper[5003]: E1206 15:33:16.214836 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423}, ... [editor's note: the remaining image entries in this retried node-status patch are byte-for-byte identical to the list in the first patch attempt above and are elided] ... ],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"010ec561-c8bb-454b-ab74-658add58caba\\\",\\\"systemUUID\\\":\\\"4
2b9e6db-6fd4-4e84-a5b7-176f28bfe2f1\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:16Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.218460 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.218514 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.218523 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.218535 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.218544 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:16Z","lastTransitionTime":"2025-12-06T15:33:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.232959 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-jmzd9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fa121e1-7f2f-4912-945f-86cb199c3014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9s4lw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9s4lw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:41Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-jmzd9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:16Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:16 crc kubenswrapper[5003]: E1206 15:33:16.233260 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056
b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048}, ... [editor's note: intermediate image entries identical to the first patch attempt above are elided] ... ,{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951
},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"010ec561-c8bb-454b-ab74-658add58caba\\\",\\\"systemUUID\\\":\\\"42b9e6db-6fd4-4e84-a5b7-176f28bfe2f1\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-12-06T15:33:16Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:16 crc kubenswrapper[5003]: E1206 15:33:16.233803 5003 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.235655 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.235694 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.235707 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.235726 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.235739 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:16Z","lastTransitionTime":"2025-12-06T15:33:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.251934 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25cfd72-ae1c-45c6-bf50-3f3cd455c1ad\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15818f84cfa472a42a18bafe5ff4a71da326b2f5871f47e693d5e1a1b3c8b986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c1591171f15bdf52339cb914e52de4dad9c34f1a6b6bb882f15bb41308a5b7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edf
d789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94df3e8c0295aedc3bf7b97296d443b5240d17fcd83f8e8cf1bc9730740d7f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bd16db35730810799b00301bb8a68f91d69dc93dac04638d5c187bdd374393d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3bd16db35730810799b00301bb8a68f91d69dc93dac04638d5c187bdd374393d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:16Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.266209 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:16Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.277593 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cac97fc8a54c2fbd81b0abade11a987e5226084d8a4f75ddbbaad1f6c4cb9783\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:16Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.291141 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a047c4d-003e-4668-9b96-945eab34ab68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b3a6db6c2c8e6a283f7b46ef28ba7310d3cfc7054cfd8cbcc290ebd0d26dcf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w25db\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:16Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.303802 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7788j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bf147399c10dd7e654abd5213c4d90e8aa9feca7f8c6032c16576a25aeace68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twj7c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47c00cc6d341dbe1a5f3495f04fe4e695370952f6d6b209a5aaff1ace9d17e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twj7c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7788j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:16Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.326840 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dc41f9e-e763-4a9a-a064-f65bc24332b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://467dcc8ed3936c6fb24f6c6c3e42eccf3597ce6920d12b253946a123ee22faf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:32Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-j4rf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:16Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 
15:33:16.338852 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.338912 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.338924 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.338940 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.338951 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:16Z","lastTransitionTime":"2025-12-06T15:33:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.342698 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use 
of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:16Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.356277 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:16Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.373311 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:16Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.385346 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:16Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.398332 5003 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:16Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.410689 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:16Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.430864 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a695d94-271c-45bc-8a89-dfdecb57ec00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f0ad17b1a040807cb7cca5ee53ef54da1870f63
cadccb3aa20421b08016cf10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f0ad17b1a040807cb7cca5ee53ef54da1870f63cadccb3aa20421b08016cf10\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-06T15:32:53Z\\\",\\\"message\\\":\\\"d44-bbd8-dba87b7dbaf0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1206 15:32:53.311445 6646 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-j4rf7 in node crc\\\\nI1206 15:32:53.311457 6646 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-j4rf7 after 0 failed attempt(s)\\\\nI1206 15:32:53.311473 6646 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-j4rf7\\\\nF1206 15:32:53.311504 6646 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:53Z is after 2025-08-24\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-p7xwd_openshift-ovn-kubernetes(8a695d94-271c-45bc-8a89-dfdecb57ec00)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7xwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:16Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.441466 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.441517 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.441528 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.441541 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.441550 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:16Z","lastTransitionTime":"2025-12-06T15:33:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.544033 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.544070 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.544080 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.544096 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.544108 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:16Z","lastTransitionTime":"2025-12-06T15:33:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.646671 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.646709 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.646719 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.646735 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.646746 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:16Z","lastTransitionTime":"2025-12-06T15:33:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.711626 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:33:16 crc kubenswrapper[5003]: E1206 15:33:16.711743 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.748711 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.748740 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.748748 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.748761 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.748771 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:16Z","lastTransitionTime":"2025-12-06T15:33:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.851790 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.851841 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.851854 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.851870 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.851880 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:16Z","lastTransitionTime":"2025-12-06T15:33:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.954736 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.954797 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.954814 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.954839 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:16 crc kubenswrapper[5003]: I1206 15:33:16.954855 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:16Z","lastTransitionTime":"2025-12-06T15:33:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.057948 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.057993 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.058007 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.058025 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.058043 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:17Z","lastTransitionTime":"2025-12-06T15:33:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.146716 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-9kdpn_350e8b9a-b7bf-4dc9-abe9-d10f7a088be3/kube-multus/0.log" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.146783 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-9kdpn" event={"ID":"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3","Type":"ContainerStarted","Data":"22d16feb3425c5cac7562c4468723b0aae567d2d31db5516b3b0ce7d38d91c6b"} Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.161829 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.161899 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.161916 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.161942 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.161958 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:17Z","lastTransitionTime":"2025-12-06T15:33:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.167909 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:17Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.182119 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:17Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.210046 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a695d94-271c-45bc-8a89-dfdecb57ec00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f0ad17b1a040807cb7cca5ee53ef54da1870f63cadccb3aa20421b08016cf10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f0ad17b1a040807cb7cca5ee53ef54da1870f63cadccb3aa20421b08016cf10\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-06T15:32:53Z\\\",\\\"message\\\":\\\"d44-bbd8-dba87b7dbaf0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1206 15:32:53.311445 6646 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-j4rf7 in node crc\\\\nI1206 15:32:53.311457 6646 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-j4rf7 after 0 failed attempt(s)\\\\nI1206 15:32:53.311473 6646 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-j4rf7\\\\nF1206 15:32:53.311504 6646 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:53Z is after 2025-08-24\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-p7xwd_openshift-ovn-kubernetes(8a695d94-271c-45bc-8a89-dfdecb57ec00)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7xwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:17Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.224884 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9kdpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22d16feb3425c5cac7562c4468723b0aae567d2d31db5516b3b0ce7d38d91c6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-06T15:33:15Z\\\",\\\"message\\\":\\\"2025-12-06T15:32:30+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_572b1774-f930-470b-aa25-26a311d76ba2\\\\n2025-12-06T15:32:30+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_572b1774-f930-470b-aa25-26a311d76ba2 to 
/host/opt/cni/bin/\\\\n2025-12-06T15:32:30Z [verbose] multus-daemon started\\\\n2025-12-06T15:32:30Z [verbose] Readiness Indicator file check\\\\n2025-12-06T15:33:15Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:33:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46c8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9kdpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:17Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.237014 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-jmzd9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fa121e1-7f2f-4912-945f-86cb199c3014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9s4lw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9s4lw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:41Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-jmzd9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:17Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.250020 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:17Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.265078 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.265122 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.265059 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qcqkl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ae5ee47d26422cffca5f12a4ee1455dcd34c148b1b490d69b88280b4497da11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbhrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qcqkl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:17Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.265134 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.265308 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.265324 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:17Z","lastTransitionTime":"2025-12-06T15:33:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.280127 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a047c4d-003e-4668-9b96-945eab34ab68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b3a6db6c2c8e6a283f7b46ef28ba7310d3cfc7054cfd8cbcc290ebd0d26dcf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w25db\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:17Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.292769 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7788j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bf147399c10dd7e654abd5213c4d90e8aa9feca7f8c6032c16576a25aeace68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twj7c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47c00cc6d341dbe1a5f3495f04fe4e695370952f6d6b209a5aaff1ace9d17e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twj7c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:
39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7788j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:17Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.306792 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25cfd72-ae1c-45c6-bf50-3f3cd455c1ad\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15818f84cfa472a42a18bafe5ff4a71da326b2f5871f47e693d5e1a1b3c8b986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c1591171f15bdf52339cb914e52de4dad9c34f1a6b6bb882f15bb41308a5b7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94df3e8c0295aedc3bf7b97296d443b5240d17fcd83f8e8cf1bc9730740d7f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-sche
duler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bd16db35730810799b00301bb8a68f91d69dc93dac04638d5c187bdd374393d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3bd16db35730810799b00301bb8a68f91d69dc93dac04638d5c187bdd374393d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:17Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.323185 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:17Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.336814 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cac97fc8a54c2fbd81b0abade11a987e5226084d8a4f75ddbbaad1f6c4cb9783\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:17Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.351951 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:17Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.366195 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:17Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.369219 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.369256 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.369267 5003 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.369291 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.369306 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:17Z","lastTransitionTime":"2025-12-06T15:33:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.383681 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dc41f9e-e763-4a9a-a064-f65bc24332b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://467dcc8ed3936c6fb24f6c6c3e42eccf3597ce6920d12b253946a123ee22faf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d
0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Runnin
g\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-j4rf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:17Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.397808 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.
io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:17Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.411253 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:17Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.472916 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.472971 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.472988 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.473007 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.473019 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:17Z","lastTransitionTime":"2025-12-06T15:33:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.576352 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.576422 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.576435 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.576458 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.576471 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:17Z","lastTransitionTime":"2025-12-06T15:33:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.679173 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.679243 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.679254 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.679272 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.679283 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:17Z","lastTransitionTime":"2025-12-06T15:33:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.712061 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.712081 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:33:17 crc kubenswrapper[5003]: E1206 15:33:17.712195 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:33:17 crc kubenswrapper[5003]: E1206 15:33:17.712326 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.712349 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:33:17 crc kubenswrapper[5003]: E1206 15:33:17.712427 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.781576 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.781620 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.781630 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.781646 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.781659 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:17Z","lastTransitionTime":"2025-12-06T15:33:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.884120 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.884172 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.884186 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.884204 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.884247 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:17Z","lastTransitionTime":"2025-12-06T15:33:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.987346 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.987406 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.987416 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.987434 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:17 crc kubenswrapper[5003]: I1206 15:33:17.987444 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:17Z","lastTransitionTime":"2025-12-06T15:33:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.090533 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.090573 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.090585 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.090600 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.090610 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:18Z","lastTransitionTime":"2025-12-06T15:33:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.192624 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.192680 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.192689 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.192705 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.192715 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:18Z","lastTransitionTime":"2025-12-06T15:33:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.295160 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.295206 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.295217 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.295233 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.295245 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:18Z","lastTransitionTime":"2025-12-06T15:33:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.398257 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.398325 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.398343 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.398371 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.398390 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:18Z","lastTransitionTime":"2025-12-06T15:33:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.504461 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.504567 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.504586 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.504621 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.504644 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:18Z","lastTransitionTime":"2025-12-06T15:33:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.610915 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.610959 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.610970 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.610985 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.610996 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:18Z","lastTransitionTime":"2025-12-06T15:33:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.711869 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:33:18 crc kubenswrapper[5003]: E1206 15:33:18.712028 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.713665 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.713701 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.713711 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.713725 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.713734 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:18Z","lastTransitionTime":"2025-12-06T15:33:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.816496 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.816546 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.816554 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.816569 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.816578 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:18Z","lastTransitionTime":"2025-12-06T15:33:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.919577 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.919644 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.919658 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.919683 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:18 crc kubenswrapper[5003]: I1206 15:33:18.919696 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:18Z","lastTransitionTime":"2025-12-06T15:33:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.023347 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.023387 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.023395 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.023413 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.023424 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:19Z","lastTransitionTime":"2025-12-06T15:33:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.126909 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.126961 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.126976 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.126996 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.127009 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:19Z","lastTransitionTime":"2025-12-06T15:33:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.229196 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.229245 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.229257 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.229273 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.229287 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:19Z","lastTransitionTime":"2025-12-06T15:33:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.332344 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.332410 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.332422 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.332442 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.332456 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:19Z","lastTransitionTime":"2025-12-06T15:33:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.435183 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.435250 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.435261 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.435279 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.435293 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:19Z","lastTransitionTime":"2025-12-06T15:33:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.538908 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.538972 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.538991 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.539020 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.539038 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:19Z","lastTransitionTime":"2025-12-06T15:33:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.641794 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.641919 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.641942 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.641973 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.641998 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:19Z","lastTransitionTime":"2025-12-06T15:33:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.711843 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 06 15:33:19 crc kubenswrapper[5003]: E1206 15:33:19.711980 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.712188 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 06 15:33:19 crc kubenswrapper[5003]: E1206 15:33:19.712248 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.712387 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 06 15:33:19 crc kubenswrapper[5003]: E1206 15:33:19.712457 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.744932 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.744984 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.744996 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.745016 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.745044 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:19Z","lastTransitionTime":"2025-12-06T15:33:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.847980 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.848028 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.848040 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.848059 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.848075 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:19Z","lastTransitionTime":"2025-12-06T15:33:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.950426 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.950479 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.950537 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.950557 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:19 crc kubenswrapper[5003]: I1206 15:33:19.950568 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:19Z","lastTransitionTime":"2025-12-06T15:33:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.053692 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.053740 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.053750 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.053765 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.053777 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:20Z","lastTransitionTime":"2025-12-06T15:33:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.156601 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.156664 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.156680 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.156703 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.156720 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:20Z","lastTransitionTime":"2025-12-06T15:33:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.260314 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.260369 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.260382 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.260404 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.260416 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:20Z","lastTransitionTime":"2025-12-06T15:33:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.363176 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.363228 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.363239 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.363259 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.363272 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:20Z","lastTransitionTime":"2025-12-06T15:33:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.466702 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.466750 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.466762 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.466779 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.466789 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:20Z","lastTransitionTime":"2025-12-06T15:33:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.569695 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.569733 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.569741 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.569756 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.569766 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:20Z","lastTransitionTime":"2025-12-06T15:33:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.672625 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.672688 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.672706 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.672729 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.672747 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:20Z","lastTransitionTime":"2025-12-06T15:33:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.712085 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9"
Dec 06 15:33:20 crc kubenswrapper[5003]: E1206 15:33:20.712756 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.712964 5003 scope.go:117] "RemoveContainer" containerID="2f0ad17b1a040807cb7cca5ee53ef54da1870f63cadccb3aa20421b08016cf10"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.744574 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"]
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.775550 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.775611 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.775631 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.775654 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.775672 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:20Z","lastTransitionTime":"2025-12-06T15:33:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.878210 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.878240 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.878252 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.878265 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.878273 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:20Z","lastTransitionTime":"2025-12-06T15:33:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.985963 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.986177 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.986194 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.986212 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:20 crc kubenswrapper[5003]: I1206 15:33:20.986224 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:20Z","lastTransitionTime":"2025-12-06T15:33:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.089039 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.089088 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.089100 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.089117 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.089129 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:21Z","lastTransitionTime":"2025-12-06T15:33:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.162989 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7xwd_8a695d94-271c-45bc-8a89-dfdecb57ec00/ovnkube-controller/2.log" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.165952 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" event={"ID":"8a695d94-271c-45bc-8a89-dfdecb57ec00","Type":"ContainerStarted","Data":"9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22"} Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.181721 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:21Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.190952 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qcqkl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ae5ee47d26422cffca5f12a4ee1455dcd34c148b1b490d69b88280b4497da11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbhrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qcqkl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-06T15:33:21Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.191866 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.191901 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.191909 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.191926 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.191935 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:21Z","lastTransitionTime":"2025-12-06T15:33:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.203282 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9kdpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22d16feb3425c5cac7562c4468723b0aae567d2d31db5516b3b0ce7d38d91c6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-06T15:33:15Z\\\",\\\"message\\\":\\\"2025-12-06T15:32:30+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_572b1774-f930-470b-aa25-26a311d76ba2\\\\n2025-12-06T15:32:30+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_572b1774-f930-470b-aa25-26a311d76ba2 to /host/opt/cni/bin/\\\\n2025-12-06T15:32:30Z [verbose] multus-daemon started\\\\n2025-12-06T15:32:30Z [verbose] Readiness Indicator file check\\\\n2025-12-06T15:33:15Z [error] have you checked that your default network is ready? 
still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:33:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46c8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9kdpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:21Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.246016 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-jmzd9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fa121e1-7f2f-4912-945f-86cb199c3014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9s4lw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9s4lw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:41Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-jmzd9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:21Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.259011 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25cfd72-ae1c-45c6-bf50-3f3cd455c1ad\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15818f84cfa472a42a18bafe5ff4a71da326b2f5871f47e693d5e1a1b3c8b986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c1591171f15bdf52339cb914e52de4dad9c34f1a6b6bb882f15bb41308a5b7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94df3e8c0295aedc3bf7b97296d443b5240d17fcd83f8e8cf1bc9730740d7f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bd16db35730810799b00301bb8a68f91d69dc93dac04638d5c187bdd374393d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3bd16db35730810799b00301bb8a68f91d69dc93dac04638d5c187bdd374393d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:21Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.278420 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:21Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.288408 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cac97fc8a54c2fbd81b0abade11a987e5226084d8a4f75ddbbaad1f6c4cb9783\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:21Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.294129 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.294168 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.294179 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.294198 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.294210 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:21Z","lastTransitionTime":"2025-12-06T15:33:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.307256 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a047c4d-003e-4668-9b96-945eab34ab68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b3a6db6c2c8e6a283f7b46ef28ba7310d3cfc7054cfd8cbcc290ebd0d26dcf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w25db\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:21Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.326874 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7788j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bf147399c10dd7e654abd5213c4d90e8aa9feca7f8c6032c16576a25aeace68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twj7c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47c00cc6d341dbe1a5f3495f04fe4e695370952f6d6b209a5aaff1ace9d17e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twj7c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7788j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:21Z is after 2025-08-24T17:21:41Z" Dec 06 
15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.339884 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:21Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.362834 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6825575f-413b-4b43-8b6a-34e95d3de438\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c340e7bd862f75d1d2d720236b9938e0749dc5089a0edf227045408ea8aa34a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b526ac47de443d35197518b6b4636600747093ce094cd30138d6b086b0f7da5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://269db465fc9bbf92ebd14dfe26ad8e6c24df17e114552c436de2176f1a58d326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52de3cb379ae4baa685f8adb4b0cbc848ac1823
dd1c3e3e84df47c83bf5ef898\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f40b931fd028c9fd16760831dc5bcf1043536e93452caacf8ac5d1bf59d1de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd8ae927f5dc1164e0be3c62baac330d746d3dae3167cb971054c6a3f50ce345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd8ae927f5dc1164e0be3c62baac330d746d3dae3167cb971054c6a3f50ce345\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdc40421549e4a1452b2c3d3ac2cdf29143dae118af75b8049d0e455c1d5d256\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fdc40421549e4a1452b2c3d3ac2cdf29143dae118af75b8049d0e455c1d5d256\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://72ad699d40d6bbae37ece0be57e00cf8198c543af71475186ba6bca233e19c59\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72ad699d40d6bbae37ece0be57e00cf8198c543af71475186ba6bca233e19c59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:21Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.379224 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:21Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.392451 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:21Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.396792 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.396849 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.396862 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.396878 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.396891 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:21Z","lastTransitionTime":"2025-12-06T15:33:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.407911 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:21Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.423782 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dc41f9e-e763-4a9a-a064-f65bc24332b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://467dcc8ed3936c6fb24f6c6c3e42eccf3597ce6920d12b253946a123ee22faf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-j4rf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:21Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.446724 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:21Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.459562 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:21Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.482045 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a695d94-271c-45bc-8a89-dfdecb57ec00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9517eb57092dd5943648e1004f42c5da7be2e94b
4e0460d9595a63ef5194be22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f0ad17b1a040807cb7cca5ee53ef54da1870f63cadccb3aa20421b08016cf10\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-06T15:32:53Z\\\",\\\"message\\\":\\\"d44-bbd8-dba87b7dbaf0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1206 15:32:53.311445 6646 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-j4rf7 in node crc\\\\nI1206 15:32:53.311457 6646 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-j4rf7 after 0 failed attempt(s)\\\\nI1206 15:32:53.311473 6646 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-j4rf7\\\\nF1206 15:32:53.311504 6646 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:53Z is after 
2025-08-24\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:33:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[
{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7xwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:21Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.512003 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.512047 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.512058 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.512075 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.512087 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:21Z","lastTransitionTime":"2025-12-06T15:33:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.711490 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.711572 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:33:21 crc kubenswrapper[5003]: E1206 15:33:21.711640 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 06 15:33:21 crc kubenswrapper[5003]: E1206 15:33:21.711710 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.711834 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 06 15:33:21 crc kubenswrapper[5003]: E1206 15:33:21.711908 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.726308 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9kdpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22d16feb3425c5cac7562c4468723b0aae567d2d31db5516b3b0ce7d38d91c6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-06T15:33:15Z\\\",\\\"message\\\":\\\"2025-12-06T15:32:30+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_572b1774-f930-470b-aa25-26a311d76ba2\\\\n2025-12-06T15:32:30+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_572b1774-f930-470b-aa25-26a311d76ba2 to /host/opt/cni/bin/\\\\n2025-12-06T15:32:30Z [verbose] multus-daemon started\\\\n2025-12-06T15:32:30Z [verbose] Readiness Indicator file check\\\\n2025-12-06T15:33:15Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:33:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46c8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9kdpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:21Z is after 2025-08-24T17:21:41Z"
Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.738253 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-jmzd9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fa121e1-7f2f-4912-945f-86cb199c3014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9s4lw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9s4lw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:41Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-jmzd9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:21Z is after 2025-08-24T17:21:41Z"
Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.752407 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:21Z is after 2025-08-24T17:21:41Z"
Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.764231 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qcqkl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ae5ee47d26422cffca5f12a4ee1455dcd34c148b1b490d69b88280b4497da11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbhrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qcqkl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:21Z is after 2025-08-24T17:21:41Z"
Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.765394 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.765419 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.765430 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.765446 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.765458 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:21Z","lastTransitionTime":"2025-12-06T15:33:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.791540 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a047c4d-003e-4668-9b96-945eab34ab68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b3a6db6c2c8e6a283f7b46ef28ba7310d3cfc7054cfd8cbcc290ebd0d26dcf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w25db\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:21Z is after 2025-08-24T17:21:41Z"
Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.803054 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7788j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bf147399c10dd7e654abd5213c4d90e8aa9feca7f8c6032c16576a25aeace68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twj7c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47c00cc6d341dbe1a5f3495f04fe4e695370952f6d6b209a5aaff1ace9d17e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twj7c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7788j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:21Z is after 2025-08-24T17:21:41Z"
Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.828309 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25cfd72-ae1c-45c6-bf50-3f3cd455c1ad\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15818f84cfa472a42a18bafe5ff4a71da326b2f5871f47e693d5e1a1b3c8b986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c1591171f15bdf52339cb914e52de4dad9c34f1a6b6bb882f15bb41308a5b7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94df3e8c0295aedc3bf7b97296d443b5240d17fcd83f8e8cf1bc9730740d7f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bd16db35730810799b00301bb8a68f91d69dc93dac04638d5c187bdd374393d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3bd16db35730810799b00301bb8a68f91d69dc93dac04638d5c187bdd374393d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:21Z is after 2025-08-24T17:21:41Z"
Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.839734 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:21Z is after 2025-08-24T17:21:41Z"
Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.849099 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cac97fc8a54c2fbd81b0abade11a987e5226084d8a4f75ddbbaad1f6c4cb9783\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:21Z is after 2025-08-24T17:21:41Z"
Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.861585 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:21Z is after 2025-08-24T17:21:41Z"
Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.876726 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.876750 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.876760 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.876772 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.876781 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:21Z","lastTransitionTime":"2025-12-06T15:33:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.877611 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:21Z is after 2025-08-24T17:21:41Z"
Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.905179 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dc41f9e-e763-4a9a-a064-f65bc24332b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://467dcc8ed3936c6fb24f6c6c3e42eccf3597ce6920d12b253946a123ee22faf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-j4rf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:21Z is after 2025-08-24T17:21:41Z"
Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.933959 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:21Z is after 2025-08-24T17:21:41Z"
Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.963057 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6825575f-413b-4b43-8b6a-34e95d3de438\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c340e7bd862f75d1d2d720236b9938e0749dc5089a0edf227045408ea8aa34a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b526ac47de443d35197518b6b4636600747093ce094cd30138d6b086b0f7da5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://269db465fc9bbf92ebd14dfe26ad8e6c24df17e114552c436de2176f1a58d326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52de3cb379ae4baa685f8adb4b0cbc848ac1823dd1c3e3e84df47c83bf5ef898\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f40b931fd028c9fd16760831dc5bcf1043536e93452caacf8ac5d1bf59d1de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd8ae927f5dc1164e0be3c62baac330d746d3dae3167cb971054c6a3f50ce345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd8ae927f5dc1164e0be3c62baac330d746d3dae3167cb971054c6a3f50ce345\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdc40421549e4a1452b2c3d3ac2cdf29143dae118af75b8049d0e455c1d5d256\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fdc40421549e4a1452b2c3d3ac2cdf29143dae118af75b8049d0e455c1d5d256\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://72ad699d40d6bbae37ece0be57e00cf8198c543af71475186ba6bca233e19c59\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72ad699d40d6bbae37ece0be57e00cf8198c543af71475186ba6bca233e19c59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:21Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.974447 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:21Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.978973 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.979001 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.979015 5003 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.979037 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.979052 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:21Z","lastTransitionTime":"2025-12-06T15:33:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:21 crc kubenswrapper[5003]: I1206 15:33:21.992582 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:21Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.003014 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:22Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.020762 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a695d94-271c-45bc-8a89-dfdecb57ec00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f0ad17b1a040807cb7cca5ee53ef54da1870f63cadccb3aa20421b08016cf10\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-06T15:32:53Z\\\",\\\"message\\\":\\\"d44-bbd8-dba87b7dbaf0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1206 15:32:53.311445 6646 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-j4rf7 in node crc\\\\nI1206 15:32:53.311457 6646 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-j4rf7 after 0 failed attempt(s)\\\\nI1206 15:32:53.311473 6646 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-j4rf7\\\\nF1206 15:32:53.311504 6646 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:53Z is after 
2025-08-24\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:33:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[
{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7xwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:22Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.081048 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.081278 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.081378 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.081462 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.081589 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:22Z","lastTransitionTime":"2025-12-06T15:33:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.183857 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.183886 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.183897 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.183912 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.183924 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:22Z","lastTransitionTime":"2025-12-06T15:33:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.286679 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.286720 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.286729 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.286742 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.286752 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:22Z","lastTransitionTime":"2025-12-06T15:33:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.388472 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.388547 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.388558 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.388572 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.388584 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:22Z","lastTransitionTime":"2025-12-06T15:33:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.491270 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.491338 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.491350 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.491365 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.491377 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:22Z","lastTransitionTime":"2025-12-06T15:33:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.594430 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.594482 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.594494 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.594527 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.594541 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:22Z","lastTransitionTime":"2025-12-06T15:33:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.697180 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.697221 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.697232 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.697248 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.697260 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:22Z","lastTransitionTime":"2025-12-06T15:33:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.712182 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:33:22 crc kubenswrapper[5003]: E1206 15:33:22.712309 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.800361 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.800457 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.800480 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.800531 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.800551 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:22Z","lastTransitionTime":"2025-12-06T15:33:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.903678 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.904088 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.904112 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.904141 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:22 crc kubenswrapper[5003]: I1206 15:33:22.904162 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:22Z","lastTransitionTime":"2025-12-06T15:33:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.007162 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.007227 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.007240 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.007260 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.007268 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:23Z","lastTransitionTime":"2025-12-06T15:33:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.109546 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.109598 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.109611 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.109628 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.109639 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:23Z","lastTransitionTime":"2025-12-06T15:33:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.175355 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7xwd_8a695d94-271c-45bc-8a89-dfdecb57ec00/ovnkube-controller/3.log" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.176152 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7xwd_8a695d94-271c-45bc-8a89-dfdecb57ec00/ovnkube-controller/2.log" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.183018 5003 generic.go:334] "Generic (PLEG): container finished" podID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerID="9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22" exitCode=1 Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.183060 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" event={"ID":"8a695d94-271c-45bc-8a89-dfdecb57ec00","Type":"ContainerDied","Data":"9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22"} Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.183106 5003 scope.go:117] "RemoveContainer" containerID="2f0ad17b1a040807cb7cca5ee53ef54da1870f63cadccb3aa20421b08016cf10" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.183876 5003 scope.go:117] "RemoveContainer" containerID="9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22" Dec 06 15:33:23 crc kubenswrapper[5003]: E1206 15:33:23.184037 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-p7xwd_openshift-ovn-kubernetes(8a695d94-271c-45bc-8a89-dfdecb57ec00)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.201266 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25cfd72-ae1c-45c6-bf50-3f3cd455c1ad\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15818f84cfa472a42a18bafe5ff4a71da326b2f5871f47e693d5e1a1b3c8b986\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c1591171f15bdf52339cb914e52de4dad9c34f1a6b6bb882f15bb41308a5b7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94df3e8c0295aedc3bf7b97296d443b5240d17fcd83f8e8cf1bc9730740d7f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bd16db35730810799b00301bb8a68f91d69dc93dac04638d5c187bdd374393d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3bd16db35730810799b00301bb8a68f91d69dc93dac04638d5c187bdd374393d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:23Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.213657 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.213698 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.213710 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.213727 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.213742 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:23Z","lastTransitionTime":"2025-12-06T15:33:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.217217 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:23Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.228571 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mdz5n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9b6e3b-054f-40de-9ad6-dd7eb78eaea1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cac97fc8a54c2fbd81b0abade11a987e5226084d8a4f75ddbbaad1f6c4cb9783\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wqmpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mdz5n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:23Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.240378 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a047c4d-003e-4668-9b96-945eab34ab68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b3a6db6c2c8e6a283f7b46ef28ba7310d3cfc7054cfd8cbcc290ebd0d26dcf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dn2k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w25db\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:23Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.259147 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7788j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f27a64b8-ecd1-4201-a3bd-a4f5a0aa05a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bf147399c10dd7e654abd5213c4d90e8aa9feca7f8c6032c16576a25aeace68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twj7c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47c00cc6d341dbe1a5f3495f04fe4e695370952f6d6b209a5aaff1ace9d17e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twj7c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7788j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:23Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.274160 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ae2d36f-5a31-4da3-aae8-0378c481f230\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-
resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T15:32:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 15:32:14.957831 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 15:32:14.959306 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1213261896/tls.crt::/tmp/serving-cert-1213261896/tls.key\\\\\\\"\\\\nI1206 15:32:20.684925 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 15:32:20.690335 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 15:32:20.690422 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 15:32:20.690568 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 15:32:20.690599 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 15:32:20.700990 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1206 15:32:20.701016 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 15:32:20.701048 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701062 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 15:32:20.701075 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1206 15:32:20.701085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 15:32:20.701093 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 15:32:20.701100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 15:32:20.702607 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:23Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.292814 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6825575f-413b-4b43-8b6a-34e95d3de438\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c340e7bd862f75d1d2d720236b9938e0749dc5089a0edf227045408ea8aa34a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b526ac47de443d35197518b6b4636600747093ce094cd30138d6b086b0f7da5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://269db465fc9bbf92ebd14dfe26ad8e6c24df17e114552c436de2176f1a58d326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52de3cb379ae4baa685f8adb4b0cbc848ac1823
dd1c3e3e84df47c83bf5ef898\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f40b931fd028c9fd16760831dc5bcf1043536e93452caacf8ac5d1bf59d1de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd8ae927f5dc1164e0be3c62baac330d746d3dae3167cb971054c6a3f50ce345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd8ae927f5dc1164e0be3c62baac330d746d3dae3167cb971054c6a3f50ce345\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdc40421549e4a1452b2c3d3ac2cdf29143dae118af75b8049d0e455c1d5d256\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fdc40421549e4a1452b2c3d3ac2cdf29143dae118af75b8049d0e455c1d5d256\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://72ad699d40d6bbae37ece0be57e00cf8198c543af71475186ba6bca233e19c59\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72ad699d40d6bbae37ece0be57e00cf8198c543af71475186ba6bca233e19c59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:23Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.307027 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c48c2555-6e1a-4e2a-801c-de22b016a80d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0880a937c71f4a7b46ffc4dcc10f50f5db23655dedb5d7e95b36cf0fcb44928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5e7cb74b8b6e2a00de6dbbd1689b52fcc3ae9862d40685bab7a805646abd4a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3377599a5ceebbce7bd9d45a6f78122fc48d86ff917a6dcfc03304ca3361659\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:23Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.317728 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.317794 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.317812 5003 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.317838 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.317856 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:23Z","lastTransitionTime":"2025-12-06T15:33:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.321867 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa744e792aa97558246eaa95c80a307138b1a6e08fe5de144b3fff62f3932e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:23Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.333712 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://224b845cf75da20cf87e5435770288110776a0edd92c9c110d6dc539419ecb7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0df37db3b8682032a0065b4ca1cead69bf0b32cb4e2fab89c53e14eb2169b7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:23Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.350158 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7dc41f9e-e763-4a9a-a064-f65bc24332b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://467dcc8ed3936c6fb24f6c6c3e42eccf3597ce6920d12b253946a123ee22faf8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79eb73c778f65e6694b0a3243db40d605557c86145f9b40ce2dfcf922ec7f38f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10cf47f05fda3c702decadc5c1cf0b6fb4df56993610f4d7bc2809e685109f6c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a33d5c66fd9234a89ff1f28e4863f80104b5d1caccd24cafa22a4598a42906c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://895f9f2647488793d0875e8cb05a6e6515733dae00a856e6a4705702adc3a5ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a02f680341e0dfb569ffcd7902d5b1fe3d575a423ff541d86c7bdd4d76b733b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6cda90c763eedc34903164d30c28d3cb696f658455d0a972fbf8d8e5505a587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qltc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-j4rf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:23Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.361898 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:23Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.374132 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab9b858ed30211c345e146452a18252752eeedcb3be8054daa2af371297cbb61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:23Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.395290 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a695d94-271c-45bc-8a89-dfdecb57ec00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9517eb57092dd5943648e1004f42c5da7be2e94b
4e0460d9595a63ef5194be22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f0ad17b1a040807cb7cca5ee53ef54da1870f63cadccb3aa20421b08016cf10\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-06T15:32:53Z\\\",\\\"message\\\":\\\"d44-bbd8-dba87b7dbaf0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1206 15:32:53.311445 6646 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-j4rf7 in node crc\\\\nI1206 15:32:53.311457 6646 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-j4rf7 after 0 failed attempt(s)\\\\nI1206 15:32:53.311473 6646 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-j4rf7\\\\nF1206 15:32:53.311504 6646 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:32:53Z is after 2025-08-24\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-06T15:33:22Z\\\",\\\"message\\\":\\\"hift-kube-controller-manager-operator/metrics]} name:Service_openshift-kube-controller-manager-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.219:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {3ec9f67e-7758-4707-a6d0-2dc28f28ac37}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1206 15:33:22.657089 7019 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling 
webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:22Z is after 2025-08-24T17:21:41Z]\\\\nI120\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:33:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"host
IP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T15:32:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5gxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7xwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:23Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.409197 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:23Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.420365 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.420442 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.420457 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.420480 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.420522 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:23Z","lastTransitionTime":"2025-12-06T15:33:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.426268 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qcqkl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"956ed4a8-8918-48eb-a2f8-f35a9ae17fde\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ae5ee47d26422cffca5f12a4ee1455dcd34c148b1b490d69b88280b4497da11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:32:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbhrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qcqkl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:23Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.442061 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9kdpn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22d16feb3425c5cac7562c4468723b0aae567d2d31db5516b3b0ce7d38d91c6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-06T15:33:15Z\\\",\\\"message\\\":\\\"2025-12-06T15:32:30+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_572b1774-f930-470b-aa25-26a311d76ba2\\\\n2025-12-06T15:32:30+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_572b1774-f930-470b-aa25-26a311d76ba2 to /host/opt/cni/bin/\\\\n2025-12-06T15:32:30Z [verbose] multus-daemon started\\\\n2025-12-06T15:32:30Z [verbose] Readiness Indicator file check\\\\n2025-12-06T15:33:15Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T15:32:28Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T15:33:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-46c8r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9kdpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:23Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.457530 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-jmzd9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9fa121e1-7f2f-4912-945f-86cb199c3014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T15:32:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9s4lw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9s4lw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T15:32:41Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-jmzd9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:23Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.523726 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.523984 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.524063 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.524129 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.524205 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:23Z","lastTransitionTime":"2025-12-06T15:33:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.626905 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.626948 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.626959 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.626977 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.626989 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:23Z","lastTransitionTime":"2025-12-06T15:33:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.712214 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:33:23 crc kubenswrapper[5003]: E1206 15:33:23.712339 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.712539 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:33:23 crc kubenswrapper[5003]: E1206 15:33:23.712590 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.712853 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:33:23 crc kubenswrapper[5003]: E1206 15:33:23.712982 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.729610 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.729669 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.729679 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.729709 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.729720 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:23Z","lastTransitionTime":"2025-12-06T15:33:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.832189 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.832234 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.832246 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.832261 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.832272 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:23Z","lastTransitionTime":"2025-12-06T15:33:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.935107 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.935144 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.935153 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.935166 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:23 crc kubenswrapper[5003]: I1206 15:33:23.935175 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:23Z","lastTransitionTime":"2025-12-06T15:33:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.037976 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.038043 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.038062 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.038090 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.038109 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:24Z","lastTransitionTime":"2025-12-06T15:33:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.140581 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.140623 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.140634 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.140652 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.140663 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:24Z","lastTransitionTime":"2025-12-06T15:33:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.189459 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7xwd_8a695d94-271c-45bc-8a89-dfdecb57ec00/ovnkube-controller/3.log" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.243152 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.243213 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.243229 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.243252 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.243269 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:24Z","lastTransitionTime":"2025-12-06T15:33:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.346030 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.346151 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.346170 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.346608 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.346665 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:24Z","lastTransitionTime":"2025-12-06T15:33:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.449738 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.449769 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.449778 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.449820 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.449839 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:24Z","lastTransitionTime":"2025-12-06T15:33:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.553160 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.553232 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.553248 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.553268 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.553281 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:24Z","lastTransitionTime":"2025-12-06T15:33:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.655147 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.655195 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.655209 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.655226 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.655235 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:24Z","lastTransitionTime":"2025-12-06T15:33:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.712053 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:33:24 crc kubenswrapper[5003]: E1206 15:33:24.712188 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.757699 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.757747 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.757761 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.757780 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.757794 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:24Z","lastTransitionTime":"2025-12-06T15:33:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.861653 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.861710 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.861723 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.861741 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.861753 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:24Z","lastTransitionTime":"2025-12-06T15:33:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.963485 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.963559 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.963573 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.963594 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:24 crc kubenswrapper[5003]: I1206 15:33:24.963608 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:24Z","lastTransitionTime":"2025-12-06T15:33:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.066740 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.066810 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.066829 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.066855 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.066874 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:25Z","lastTransitionTime":"2025-12-06T15:33:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.169282 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.169351 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.169366 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.169388 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.169401 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:25Z","lastTransitionTime":"2025-12-06T15:33:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.272507 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.272565 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.272580 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.272608 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.272622 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:25Z","lastTransitionTime":"2025-12-06T15:33:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.375654 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.375713 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.375730 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.375753 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.375769 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:25Z","lastTransitionTime":"2025-12-06T15:33:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.477468 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.477537 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.477550 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.477567 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.477580 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:25Z","lastTransitionTime":"2025-12-06T15:33:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.579949 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.579995 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.580003 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.580018 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.580029 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:25Z","lastTransitionTime":"2025-12-06T15:33:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.682453 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.682532 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.682544 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.682561 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.682573 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:25Z","lastTransitionTime":"2025-12-06T15:33:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.697976 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.698064 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:33:25 crc kubenswrapper[5003]: E1206 15:33:25.698095 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-06 15:34:29.698070241 +0000 UTC m=+148.231424632 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.698126 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:33:25 crc kubenswrapper[5003]: E1206 15:33:25.698165 5003 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.698164 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:33:25 crc kubenswrapper[5003]: E1206 15:33:25.698199 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-06 15:34:29.698190575 +0000 UTC m=+148.231544956 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.698214 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:33:25 crc kubenswrapper[5003]: E1206 15:33:25.698269 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 06 15:33:25 crc kubenswrapper[5003]: E1206 15:33:25.698283 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 06 15:33:25 crc kubenswrapper[5003]: E1206 15:33:25.698295 5003 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 15:33:25 crc kubenswrapper[5003]: E1206 15:33:25.698315 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 06 15:33:25 crc kubenswrapper[5003]: E1206 15:33:25.698326 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 06 15:33:25 crc kubenswrapper[5003]: E1206 15:33:25.698320 5003 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 06 15:33:25 crc kubenswrapper[5003]: E1206 15:33:25.698333 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-06 15:34:29.698324048 +0000 UTC m=+148.231678439 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 15:33:25 crc kubenswrapper[5003]: E1206 15:33:25.698335 5003 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 15:33:25 crc kubenswrapper[5003]: E1206 15:33:25.698423 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-06 15:34:29.69840365 +0000 UTC m=+148.231758031 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 06 15:33:25 crc kubenswrapper[5003]: E1206 15:33:25.698515 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-06 15:34:29.698473522 +0000 UTC m=+148.231827983 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.712447 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.712458 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.712591 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:33:25 crc kubenswrapper[5003]: E1206 15:33:25.712696 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:33:25 crc kubenswrapper[5003]: E1206 15:33:25.712791 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:33:25 crc kubenswrapper[5003]: E1206 15:33:25.712884 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.784823 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.784882 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.784891 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.784905 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.784915 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:25Z","lastTransitionTime":"2025-12-06T15:33:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.886899 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.886932 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.886940 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.886952 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.886962 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:25Z","lastTransitionTime":"2025-12-06T15:33:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.990276 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.990316 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.990325 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.990340 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:25 crc kubenswrapper[5003]: I1206 15:33:25.990351 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:25Z","lastTransitionTime":"2025-12-06T15:33:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.093535 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.093634 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.093651 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.093679 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.093691 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:26Z","lastTransitionTime":"2025-12-06T15:33:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.196563 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.196642 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.196667 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.196697 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.196721 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:26Z","lastTransitionTime":"2025-12-06T15:33:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.299735 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.299860 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.299910 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.299946 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.299963 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:26Z","lastTransitionTime":"2025-12-06T15:33:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.364803 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.364886 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.364903 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.364927 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.364945 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:26Z","lastTransitionTime":"2025-12-06T15:33:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:26 crc kubenswrapper[5003]: E1206 15:33:26.385724 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"010ec561-c8bb-454b-ab74-658add58caba\\\",\\\"systemUUID\\\":\\\"42b9e6db-6fd4-4e84-a5b7-176f28bfe2f1\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:26Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.390641 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.390692 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.390708 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.390734 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.390752 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:26Z","lastTransitionTime":"2025-12-06T15:33:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:26 crc kubenswrapper[5003]: E1206 15:33:26.406233 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"010ec561-c8bb-454b-ab74-658add58caba\\\",\\\"systemUUID\\\":\\\"42b9e6db-6fd4-4e84-a5b7-176f28bfe2f1\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:26Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.410172 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.410214 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.410256 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.410276 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.410316 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:26Z","lastTransitionTime":"2025-12-06T15:33:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:26 crc kubenswrapper[5003]: E1206 15:33:26.430394 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"010ec561-c8bb-454b-ab74-658add58caba\\\",\\\"systemUUID\\\":\\\"42b9e6db-6fd4-4e84-a5b7-176f28bfe2f1\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:26Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.434628 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.434669 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.434683 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.434700 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.434716 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:26Z","lastTransitionTime":"2025-12-06T15:33:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:26 crc kubenswrapper[5003]: E1206 15:33:26.449858 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"010ec561-c8bb-454b-ab74-658add58caba\\\",\\\"systemUUID\\\":\\\"42b9e6db-6fd4-4e84-a5b7-176f28bfe2f1\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:26Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.454562 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.454595 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.454606 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.454624 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.454636 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:26Z","lastTransitionTime":"2025-12-06T15:33:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:26 crc kubenswrapper[5003]: E1206 15:33:26.473437 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-06T15:33:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"010ec561-c8bb-454b-ab74-658add58caba\\\",\\\"systemUUID\\\":\\\"42b9e6db-6fd4-4e84-a5b7-176f28bfe2f1\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T15:33:26Z is after 2025-08-24T17:21:41Z" Dec 06 15:33:26 crc kubenswrapper[5003]: E1206 15:33:26.473592 5003 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.475381 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.475434 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.475446 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.475468 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.475481 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:26Z","lastTransitionTime":"2025-12-06T15:33:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.578011 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.578077 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.578100 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.578124 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.578145 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:26Z","lastTransitionTime":"2025-12-06T15:33:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.681870 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.682136 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.682162 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.682197 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.682263 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:26Z","lastTransitionTime":"2025-12-06T15:33:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.712179 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:33:26 crc kubenswrapper[5003]: E1206 15:33:26.712395 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.785417 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.785498 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.785512 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.785530 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.785541 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:26Z","lastTransitionTime":"2025-12-06T15:33:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.889073 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.889132 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.889144 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.889163 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.889175 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:26Z","lastTransitionTime":"2025-12-06T15:33:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.992540 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.992610 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.992621 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.992642 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:26 crc kubenswrapper[5003]: I1206 15:33:26.992654 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:26Z","lastTransitionTime":"2025-12-06T15:33:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.095030 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.095062 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.095069 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.095082 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.095090 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:27Z","lastTransitionTime":"2025-12-06T15:33:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.197715 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.197774 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.197793 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.197817 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.197836 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:27Z","lastTransitionTime":"2025-12-06T15:33:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.300489 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.300579 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.300599 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.300618 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.300632 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:27Z","lastTransitionTime":"2025-12-06T15:33:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.403464 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.403573 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.403590 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.403607 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.403619 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:27Z","lastTransitionTime":"2025-12-06T15:33:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.506578 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.506610 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.506619 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.506631 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.506641 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:27Z","lastTransitionTime":"2025-12-06T15:33:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.609521 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.609562 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.609574 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.609591 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.609603 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:27Z","lastTransitionTime":"2025-12-06T15:33:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.711431 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.711443 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.711522 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:33:27 crc kubenswrapper[5003]: E1206 15:33:27.711572 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:33:27 crc kubenswrapper[5003]: E1206 15:33:27.711793 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.712204 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.712233 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.712244 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.712258 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:27 crc kubenswrapper[5003]: E1206 15:33:27.712227 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.712270 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:27Z","lastTransitionTime":"2025-12-06T15:33:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.815871 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.815929 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.815942 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.815957 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.815970 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:27Z","lastTransitionTime":"2025-12-06T15:33:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.921621 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.921692 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.921713 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.921741 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:27 crc kubenswrapper[5003]: I1206 15:33:27.921762 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:27Z","lastTransitionTime":"2025-12-06T15:33:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.023771 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.023855 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.023876 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.023906 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.023925 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:28Z","lastTransitionTime":"2025-12-06T15:33:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.125467 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.125556 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.125578 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.125599 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.125616 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:28Z","lastTransitionTime":"2025-12-06T15:33:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.227610 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.227671 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.227680 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.227694 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.227704 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:28Z","lastTransitionTime":"2025-12-06T15:33:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.329693 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.329728 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.329736 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.329751 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.329760 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:28Z","lastTransitionTime":"2025-12-06T15:33:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.432744 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.432786 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.432797 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.432813 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.432826 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:28Z","lastTransitionTime":"2025-12-06T15:33:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.535841 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.535879 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.535891 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.535907 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.535920 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:28Z","lastTransitionTime":"2025-12-06T15:33:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.637766 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.637824 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.637841 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.637868 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.637885 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:28Z","lastTransitionTime":"2025-12-06T15:33:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.711892 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:33:28 crc kubenswrapper[5003]: E1206 15:33:28.712111 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.741697 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.741757 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.741774 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.741799 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.741816 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:28Z","lastTransitionTime":"2025-12-06T15:33:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.844464 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.844521 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.844533 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.844550 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:28 crc kubenswrapper[5003]: I1206 15:33:28.844561 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:28Z","lastTransitionTime":"2025-12-06T15:33:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 06 15:33:29 crc kubenswrapper[5003]: I1206 15:33:29.712173 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 06 15:33:29 crc kubenswrapper[5003]: I1206 15:33:29.712218 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 06 15:33:29 crc kubenswrapper[5003]: I1206 15:33:29.712278 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 06 15:33:29 crc kubenswrapper[5003]: E1206 15:33:29.712360 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:33:29 crc kubenswrapper[5003]: E1206 15:33:29.712424 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:33:29 crc kubenswrapper[5003]: E1206 15:33:29.712530 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:33:29 crc kubenswrapper[5003]: I1206 15:33:29.776259 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:29 crc kubenswrapper[5003]: I1206 15:33:29.776415 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:29 crc kubenswrapper[5003]: I1206 15:33:29.776426 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:29 crc kubenswrapper[5003]: I1206 15:33:29.776440 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:29 crc kubenswrapper[5003]: I1206 15:33:29.776450 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:29Z","lastTransitionTime":"2025-12-06T15:33:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:29 crc kubenswrapper[5003]: I1206 15:33:29.879417 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:29 crc kubenswrapper[5003]: I1206 15:33:29.879482 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:29 crc kubenswrapper[5003]: I1206 15:33:29.879556 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:29 crc kubenswrapper[5003]: I1206 15:33:29.879581 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:29 crc kubenswrapper[5003]: I1206 15:33:29.879600 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:29Z","lastTransitionTime":"2025-12-06T15:33:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 06 15:33:30 crc kubenswrapper[5003]: I1206 15:33:30.712159 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9"
Dec 06 15:33:30 crc kubenswrapper[5003]: E1206 15:33:30.712350 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014"
pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:33:30 crc kubenswrapper[5003]: I1206 15:33:30.811766 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:30 crc kubenswrapper[5003]: I1206 15:33:30.811824 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:30 crc kubenswrapper[5003]: I1206 15:33:30.811841 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:30 crc kubenswrapper[5003]: I1206 15:33:30.811863 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:30 crc kubenswrapper[5003]: I1206 15:33:30.811880 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:30Z","lastTransitionTime":"2025-12-06T15:33:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:30 crc kubenswrapper[5003]: I1206 15:33:30.915075 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:30 crc kubenswrapper[5003]: I1206 15:33:30.915142 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:30 crc kubenswrapper[5003]: I1206 15:33:30.915166 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:30 crc kubenswrapper[5003]: I1206 15:33:30.915198 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:30 crc kubenswrapper[5003]: I1206 15:33:30.915222 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:30Z","lastTransitionTime":"2025-12-06T15:33:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 06 15:33:31 crc kubenswrapper[5003]: I1206 15:33:31.711578 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 06 15:33:31 crc kubenswrapper[5003]: I1206 15:33:31.711656 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 06 15:33:31 crc kubenswrapper[5003]: E1206 15:33:31.711738 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 06 15:33:31 crc kubenswrapper[5003]: E1206 15:33:31.711798 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 06 15:33:31 crc kubenswrapper[5003]: I1206 15:33:31.711901 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 06 15:33:31 crc kubenswrapper[5003]: E1206 15:33:31.712062 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:33:31 crc kubenswrapper[5003]: I1206 15:33:31.733253 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-qcqkl" podStartSLOduration=66.733235327 podStartE2EDuration="1m6.733235327s" podCreationTimestamp="2025-12-06 15:32:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:33:31.732915918 +0000 UTC m=+90.266270309" watchObservedRunningTime="2025-12-06 15:33:31.733235327 +0000 UTC m=+90.266589718" Dec 06 15:33:31 crc kubenswrapper[5003]: I1206 15:33:31.742149 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:31 crc kubenswrapper[5003]: I1206 15:33:31.742173 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:31 crc kubenswrapper[5003]: I1206 15:33:31.742182 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:31 crc kubenswrapper[5003]: I1206 15:33:31.742193 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:31 crc kubenswrapper[5003]: I1206 15:33:31.742202 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:31Z","lastTransitionTime":"2025-12-06T15:33:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 06 15:33:31 crc kubenswrapper[5003]: I1206 15:33:31.749652 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-9kdpn" podStartSLOduration=65.749628966 podStartE2EDuration="1m5.749628966s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:33:31.748140217 +0000 UTC m=+90.281494598" watchObservedRunningTime="2025-12-06 15:33:31.749628966 +0000 UTC m=+90.282983367"
Dec 06 15:33:31 crc kubenswrapper[5003]: I1206 15:33:31.796745 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-mdz5n" podStartSLOduration=66.796731609 podStartE2EDuration="1m6.796731609s" podCreationTimestamp="2025-12-06 15:32:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:33:31.794274003 +0000 UTC m=+90.327628384" watchObservedRunningTime="2025-12-06 15:33:31.796731609 +0000 UTC m=+90.330085990"
Dec 06 15:33:31 crc kubenswrapper[5003]: I1206 15:33:31.831427 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podStartSLOduration=66.831396639 podStartE2EDuration="1m6.831396639s" podCreationTimestamp="2025-12-06 15:32:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:33:31.817201198 +0000 UTC m=+90.350555589" watchObservedRunningTime="2025-12-06 15:33:31.831396639 +0000 UTC m=+90.364751030"
Dec 06 15:33:31 crc kubenswrapper[5003]: I1206 15:33:31.831634 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7788j" podStartSLOduration=65.831627426 podStartE2EDuration="1m5.831627426s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:33:31.831296456 +0000 UTC m=+90.364650887" watchObservedRunningTime="2025-12-06 15:33:31.831627426 +0000 UTC m=+90.364981827"
Dec 06 15:33:31 crc kubenswrapper[5003]: I1206 15:33:31.844329 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=36.844313395 podStartE2EDuration="36.844313395s" podCreationTimestamp="2025-12-06 15:32:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:33:31.844207812 +0000 UTC m=+90.377562213" watchObservedRunningTime="2025-12-06 15:33:31.844313395 +0000 UTC m=+90.377667776"
Dec 06 15:33:31 crc kubenswrapper[5003]: I1206 15:33:31.870065 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=70.870042035 podStartE2EDuration="1m10.870042035s" podCreationTimestamp="2025-12-06 15:32:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:33:31.869009888 +0000 UTC m=+90.402364299" watchObservedRunningTime="2025-12-06 15:33:31.870042035 +0000 UTC m=+90.403396436"
Dec 06 15:33:31 crc kubenswrapper[5003]: I1206 15:33:31.936186 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-j4rf7" podStartSLOduration=65.936165678 podStartE2EDuration="1m5.936165678s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:33:31.917678162 +0000 UTC m=+90.451032543" watchObservedRunningTime="2025-12-06 15:33:31.936165678 +0000 UTC m=+90.469520049"
Dec 06 15:33:31 crc kubenswrapper[5003]: I1206 15:33:31.936354 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=70.936350873 podStartE2EDuration="1m10.936350873s" podCreationTimestamp="2025-12-06 15:32:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:33:31.935956553 +0000 UTC m=+90.469310954" watchObservedRunningTime="2025-12-06 15:33:31.936350873 +0000 UTC m=+90.469705254"
Dec 06 15:33:31 crc kubenswrapper[5003]: I1206 15:33:31.961804 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=11.961787085 podStartE2EDuration="11.961787085s" podCreationTimestamp="2025-12-06 15:33:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:33:31.96012433 +0000 UTC m=+90.493478731" watchObservedRunningTime="2025-12-06 15:33:31.961787085 +0000 UTC m=+90.495141466"
Dec 06 15:33:32 crc kubenswrapper[5003]: I1206 15:33:32.712088 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9"
Dec 06 15:33:32 crc kubenswrapper[5003]: E1206 15:33:32.712287 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014"
pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:33:32 crc kubenswrapper[5003]: I1206 15:33:32.767895 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:32 crc kubenswrapper[5003]: I1206 15:33:32.767985 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:32 crc kubenswrapper[5003]: I1206 15:33:32.768004 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:32 crc kubenswrapper[5003]: I1206 15:33:32.768028 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:32 crc kubenswrapper[5003]: I1206 15:33:32.768049 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:32Z","lastTransitionTime":"2025-12-06T15:33:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:32 crc kubenswrapper[5003]: I1206 15:33:32.870217 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:32 crc kubenswrapper[5003]: I1206 15:33:32.870256 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:32 crc kubenswrapper[5003]: I1206 15:33:32.870264 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:32 crc kubenswrapper[5003]: I1206 15:33:32.870277 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:32 crc kubenswrapper[5003]: I1206 15:33:32.870285 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:32Z","lastTransitionTime":"2025-12-06T15:33:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:33 crc kubenswrapper[5003]: I1206 15:33:33.594579 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:33 crc kubenswrapper[5003]: I1206 15:33:33.594693 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:33 crc kubenswrapper[5003]: I1206 15:33:33.594705 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:33 crc kubenswrapper[5003]: I1206 15:33:33.594727 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:33 crc kubenswrapper[5003]: I1206 15:33:33.594748 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:33Z","lastTransitionTime":"2025-12-06T15:33:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:33 crc kubenswrapper[5003]: I1206 15:33:33.697336 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:33 crc kubenswrapper[5003]: I1206 15:33:33.697394 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:33 crc kubenswrapper[5003]: I1206 15:33:33.697409 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:33 crc kubenswrapper[5003]: I1206 15:33:33.697428 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:33 crc kubenswrapper[5003]: I1206 15:33:33.697462 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:33Z","lastTransitionTime":"2025-12-06T15:33:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:33 crc kubenswrapper[5003]: I1206 15:33:33.712363 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:33:33 crc kubenswrapper[5003]: I1206 15:33:33.712409 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:33:33 crc kubenswrapper[5003]: I1206 15:33:33.712386 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:33:33 crc kubenswrapper[5003]: E1206 15:33:33.712542 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:33:33 crc kubenswrapper[5003]: E1206 15:33:33.712596 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:33:33 crc kubenswrapper[5003]: E1206 15:33:33.712686 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:33:33 crc kubenswrapper[5003]: I1206 15:33:33.800305 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:33 crc kubenswrapper[5003]: I1206 15:33:33.800391 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:33 crc kubenswrapper[5003]: I1206 15:33:33.800421 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:33 crc kubenswrapper[5003]: I1206 15:33:33.800451 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:33 crc kubenswrapper[5003]: I1206 15:33:33.800474 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:33Z","lastTransitionTime":"2025-12-06T15:33:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:33 crc kubenswrapper[5003]: I1206 15:33:33.903363 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:33 crc kubenswrapper[5003]: I1206 15:33:33.903435 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:33 crc kubenswrapper[5003]: I1206 15:33:33.903459 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:33 crc kubenswrapper[5003]: I1206 15:33:33.903634 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:33 crc kubenswrapper[5003]: I1206 15:33:33.903705 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:33Z","lastTransitionTime":"2025-12-06T15:33:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.006625 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.006669 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.006678 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.006692 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.006701 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:34Z","lastTransitionTime":"2025-12-06T15:33:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.048031 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.050132 5003 scope.go:117] "RemoveContainer" containerID="9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22" Dec 06 15:33:34 crc kubenswrapper[5003]: E1206 15:33:34.050433 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-p7xwd_openshift-ovn-kubernetes(8a695d94-271c-45bc-8a89-dfdecb57ec00)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.109643 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.109705 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.109722 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.109743 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.109760 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:34Z","lastTransitionTime":"2025-12-06T15:33:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.212722 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.212786 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.212804 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.212829 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.212847 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:34Z","lastTransitionTime":"2025-12-06T15:33:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.315627 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.315662 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.315670 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.315683 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.315692 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:34Z","lastTransitionTime":"2025-12-06T15:33:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.418599 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.418653 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.418664 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.418682 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.418695 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:34Z","lastTransitionTime":"2025-12-06T15:33:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.521462 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.521593 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.521623 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.521650 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.521667 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:34Z","lastTransitionTime":"2025-12-06T15:33:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.624391 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.624431 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.624445 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.624463 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.624475 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:34Z","lastTransitionTime":"2025-12-06T15:33:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.712170 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:33:34 crc kubenswrapper[5003]: E1206 15:33:34.712356 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.728033 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.728101 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.728127 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.728158 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.728180 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:34Z","lastTransitionTime":"2025-12-06T15:33:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.830247 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.830309 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.830327 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.830349 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.830368 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:34Z","lastTransitionTime":"2025-12-06T15:33:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.933129 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.933218 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.933249 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.933276 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:34 crc kubenswrapper[5003]: I1206 15:33:34.933295 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:34Z","lastTransitionTime":"2025-12-06T15:33:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.036039 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.036115 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.036135 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.036158 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.036176 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:35Z","lastTransitionTime":"2025-12-06T15:33:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.139009 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.139078 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.139096 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.139122 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.139139 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:35Z","lastTransitionTime":"2025-12-06T15:33:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.241882 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.241963 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.241987 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.242018 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.242043 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:35Z","lastTransitionTime":"2025-12-06T15:33:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.345451 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.345564 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.345583 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.345604 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.345622 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:35Z","lastTransitionTime":"2025-12-06T15:33:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.448186 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.448248 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.448274 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.448305 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.448342 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:35Z","lastTransitionTime":"2025-12-06T15:33:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.550867 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.550908 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.550919 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.550934 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.550946 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:35Z","lastTransitionTime":"2025-12-06T15:33:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.653420 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.653527 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.653546 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.653569 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.653588 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:35Z","lastTransitionTime":"2025-12-06T15:33:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.712410 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.712548 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:33:35 crc kubenswrapper[5003]: E1206 15:33:35.712613 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.712639 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:33:35 crc kubenswrapper[5003]: E1206 15:33:35.712786 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:33:35 crc kubenswrapper[5003]: E1206 15:33:35.713098 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.724652 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.756646 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.756724 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.756756 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.756793 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.756815 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:35Z","lastTransitionTime":"2025-12-06T15:33:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.859659 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.859726 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.859737 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.859755 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.859768 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:35Z","lastTransitionTime":"2025-12-06T15:33:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.962718 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.962767 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.962778 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.962792 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:35 crc kubenswrapper[5003]: I1206 15:33:35.962801 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:35Z","lastTransitionTime":"2025-12-06T15:33:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.065388 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.065438 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.065449 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.065467 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.065480 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:36Z","lastTransitionTime":"2025-12-06T15:33:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.169290 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.169374 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.169402 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.169431 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.169451 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:36Z","lastTransitionTime":"2025-12-06T15:33:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.271886 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.271926 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.271937 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.271953 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.271965 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:36Z","lastTransitionTime":"2025-12-06T15:33:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.374110 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.374161 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.374170 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.374184 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.374193 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:36Z","lastTransitionTime":"2025-12-06T15:33:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.476442 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.476519 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.476537 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.476560 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.476579 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:36Z","lastTransitionTime":"2025-12-06T15:33:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.517778 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.517816 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.517826 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.517841 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.517862 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T15:33:36Z","lastTransitionTime":"2025-12-06T15:33:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.574956 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-hxh4d"] Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.575403 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hxh4d" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.591634 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.592443 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.592525 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.592941 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.711440 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:33:36 crc kubenswrapper[5003]: E1206 15:33:36.711617 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.726765 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a9baf9d1-da3b-4f22-8012-68e804c8a756-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-hxh4d\" (UID: \"a9baf9d1-da3b-4f22-8012-68e804c8a756\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hxh4d" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.726801 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a9baf9d1-da3b-4f22-8012-68e804c8a756-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-hxh4d\" (UID: \"a9baf9d1-da3b-4f22-8012-68e804c8a756\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hxh4d" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.726829 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a9baf9d1-da3b-4f22-8012-68e804c8a756-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-hxh4d\" (UID: \"a9baf9d1-da3b-4f22-8012-68e804c8a756\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hxh4d" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.726866 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a9baf9d1-da3b-4f22-8012-68e804c8a756-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-hxh4d\" (UID: \"a9baf9d1-da3b-4f22-8012-68e804c8a756\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hxh4d" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.726901 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: 
\"kubernetes.io/configmap/a9baf9d1-da3b-4f22-8012-68e804c8a756-service-ca\") pod \"cluster-version-operator-5c965bbfc6-hxh4d\" (UID: \"a9baf9d1-da3b-4f22-8012-68e804c8a756\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hxh4d" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.827538 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a9baf9d1-da3b-4f22-8012-68e804c8a756-service-ca\") pod \"cluster-version-operator-5c965bbfc6-hxh4d\" (UID: \"a9baf9d1-da3b-4f22-8012-68e804c8a756\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hxh4d" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.827641 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a9baf9d1-da3b-4f22-8012-68e804c8a756-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-hxh4d\" (UID: \"a9baf9d1-da3b-4f22-8012-68e804c8a756\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hxh4d" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.827700 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a9baf9d1-da3b-4f22-8012-68e804c8a756-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-hxh4d\" (UID: \"a9baf9d1-da3b-4f22-8012-68e804c8a756\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hxh4d" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.827733 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a9baf9d1-da3b-4f22-8012-68e804c8a756-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-hxh4d\" (UID: \"a9baf9d1-da3b-4f22-8012-68e804c8a756\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hxh4d" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.827779 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a9baf9d1-da3b-4f22-8012-68e804c8a756-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-hxh4d\" (UID: \"a9baf9d1-da3b-4f22-8012-68e804c8a756\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hxh4d" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.827869 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a9baf9d1-da3b-4f22-8012-68e804c8a756-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-hxh4d\" (UID: \"a9baf9d1-da3b-4f22-8012-68e804c8a756\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hxh4d" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.827908 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a9baf9d1-da3b-4f22-8012-68e804c8a756-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-hxh4d\" (UID: \"a9baf9d1-da3b-4f22-8012-68e804c8a756\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hxh4d" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.828811 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a9baf9d1-da3b-4f22-8012-68e804c8a756-service-ca\") pod 
\"cluster-version-operator-5c965bbfc6-hxh4d\" (UID: \"a9baf9d1-da3b-4f22-8012-68e804c8a756\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hxh4d" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.847760 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a9baf9d1-da3b-4f22-8012-68e804c8a756-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-hxh4d\" (UID: \"a9baf9d1-da3b-4f22-8012-68e804c8a756\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hxh4d" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.857713 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a9baf9d1-da3b-4f22-8012-68e804c8a756-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-hxh4d\" (UID: \"a9baf9d1-da3b-4f22-8012-68e804c8a756\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hxh4d" Dec 06 15:33:36 crc kubenswrapper[5003]: I1206 15:33:36.908628 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hxh4d" Dec 06 15:33:37 crc kubenswrapper[5003]: I1206 15:33:37.237237 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hxh4d" event={"ID":"a9baf9d1-da3b-4f22-8012-68e804c8a756","Type":"ContainerStarted","Data":"b49ec324432be855b620c9e4e965b68ba84ff14c79b2d8d835f56d2f6708c216"} Dec 06 15:33:37 crc kubenswrapper[5003]: I1206 15:33:37.237357 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hxh4d" event={"ID":"a9baf9d1-da3b-4f22-8012-68e804c8a756","Type":"ContainerStarted","Data":"1717e08d235a77a002076ee11a6ed7c8ebd9655220ee2a5fc45d881ea48aff13"} Dec 06 15:33:37 crc kubenswrapper[5003]: I1206 15:33:37.262053 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=2.261971753 podStartE2EDuration="2.261971753s" podCreationTimestamp="2025-12-06 15:33:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:33:36.615003846 +0000 UTC m=+95.148358227" watchObservedRunningTime="2025-12-06 15:33:37.261971753 +0000 UTC m=+95.795326194" Dec 06 15:33:37 crc kubenswrapper[5003]: I1206 15:33:37.265717 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hxh4d" podStartSLOduration=71.265694433 podStartE2EDuration="1m11.265694433s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:33:37.260144984 +0000 UTC m=+95.793499435" watchObservedRunningTime="2025-12-06 15:33:37.265694433 +0000 UTC m=+95.799048844" Dec 06 15:33:37 crc kubenswrapper[5003]: I1206 15:33:37.711393 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:33:37 crc kubenswrapper[5003]: I1206 15:33:37.711544 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:33:37 crc kubenswrapper[5003]: E1206 15:33:37.711604 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:33:37 crc kubenswrapper[5003]: I1206 15:33:37.711627 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:33:37 crc kubenswrapper[5003]: E1206 15:33:37.711752 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:33:37 crc kubenswrapper[5003]: E1206 15:33:37.711997 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:33:38 crc kubenswrapper[5003]: I1206 15:33:38.712001 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:33:38 crc kubenswrapper[5003]: E1206 15:33:38.712568 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:33:39 crc kubenswrapper[5003]: I1206 15:33:39.712335 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:33:39 crc kubenswrapper[5003]: I1206 15:33:39.713526 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:33:39 crc kubenswrapper[5003]: I1206 15:33:39.713761 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:33:39 crc kubenswrapper[5003]: E1206 15:33:39.713851 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:33:39 crc kubenswrapper[5003]: E1206 15:33:39.713684 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:33:39 crc kubenswrapper[5003]: E1206 15:33:39.714602 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:33:40 crc kubenswrapper[5003]: I1206 15:33:40.711313 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:33:40 crc kubenswrapper[5003]: E1206 15:33:40.711579 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:33:41 crc kubenswrapper[5003]: I1206 15:33:41.712165 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:33:41 crc kubenswrapper[5003]: I1206 15:33:41.712165 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:33:41 crc kubenswrapper[5003]: E1206 15:33:41.712427 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:33:41 crc kubenswrapper[5003]: E1206 15:33:41.712319 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:33:41 crc kubenswrapper[5003]: I1206 15:33:41.712181 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:33:41 crc kubenswrapper[5003]: E1206 15:33:41.712561 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:33:42 crc kubenswrapper[5003]: I1206 15:33:42.711679 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:33:42 crc kubenswrapper[5003]: E1206 15:33:42.711902 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:33:43 crc kubenswrapper[5003]: I1206 15:33:43.711398 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:33:43 crc kubenswrapper[5003]: I1206 15:33:43.711536 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:33:43 crc kubenswrapper[5003]: E1206 15:33:43.711566 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:33:43 crc kubenswrapper[5003]: E1206 15:33:43.711684 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:33:43 crc kubenswrapper[5003]: I1206 15:33:43.711401 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:33:43 crc kubenswrapper[5003]: E1206 15:33:43.711807 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:33:44 crc kubenswrapper[5003]: I1206 15:33:44.711575 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:33:44 crc kubenswrapper[5003]: E1206 15:33:44.711784 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:33:45 crc kubenswrapper[5003]: I1206 15:33:45.122222 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9fa121e1-7f2f-4912-945f-86cb199c3014-metrics-certs\") pod \"network-metrics-daemon-jmzd9\" (UID: \"9fa121e1-7f2f-4912-945f-86cb199c3014\") " pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:33:45 crc kubenswrapper[5003]: E1206 15:33:45.122441 5003 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 06 15:33:45 crc kubenswrapper[5003]: E1206 15:33:45.122558 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9fa121e1-7f2f-4912-945f-86cb199c3014-metrics-certs podName:9fa121e1-7f2f-4912-945f-86cb199c3014 nodeName:}" failed. No retries permitted until 2025-12-06 15:34:49.122537435 +0000 UTC m=+167.655891816 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9fa121e1-7f2f-4912-945f-86cb199c3014-metrics-certs") pod "network-metrics-daemon-jmzd9" (UID: "9fa121e1-7f2f-4912-945f-86cb199c3014") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 06 15:33:45 crc kubenswrapper[5003]: I1206 15:33:45.712101 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:33:45 crc kubenswrapper[5003]: I1206 15:33:45.712321 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:33:45 crc kubenswrapper[5003]: E1206 15:33:45.712405 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:33:45 crc kubenswrapper[5003]: I1206 15:33:45.712460 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:33:45 crc kubenswrapper[5003]: E1206 15:33:45.712659 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:33:45 crc kubenswrapper[5003]: E1206 15:33:45.713064 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:33:46 crc kubenswrapper[5003]: I1206 15:33:46.712152 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:33:46 crc kubenswrapper[5003]: E1206 15:33:46.712444 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:33:46 crc kubenswrapper[5003]: I1206 15:33:46.713933 5003 scope.go:117] "RemoveContainer" containerID="9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22" Dec 06 15:33:46 crc kubenswrapper[5003]: E1206 15:33:46.714202 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-p7xwd_openshift-ovn-kubernetes(8a695d94-271c-45bc-8a89-dfdecb57ec00)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" Dec 06 15:33:47 crc kubenswrapper[5003]: I1206 15:33:47.711884 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:33:47 crc kubenswrapper[5003]: E1206 15:33:47.712025 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:33:47 crc kubenswrapper[5003]: I1206 15:33:47.712254 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:33:47 crc kubenswrapper[5003]: E1206 15:33:47.712315 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:33:47 crc kubenswrapper[5003]: I1206 15:33:47.712381 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:33:47 crc kubenswrapper[5003]: E1206 15:33:47.712566 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:33:48 crc kubenswrapper[5003]: I1206 15:33:48.711507 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:33:48 crc kubenswrapper[5003]: E1206 15:33:48.711822 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:33:49 crc kubenswrapper[5003]: I1206 15:33:49.711590 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:33:49 crc kubenswrapper[5003]: I1206 15:33:49.711696 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:33:49 crc kubenswrapper[5003]: E1206 15:33:49.711777 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:33:49 crc kubenswrapper[5003]: I1206 15:33:49.711854 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:33:49 crc kubenswrapper[5003]: E1206 15:33:49.711942 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:33:49 crc kubenswrapper[5003]: E1206 15:33:49.712082 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:33:50 crc kubenswrapper[5003]: I1206 15:33:50.711640 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:33:50 crc kubenswrapper[5003]: E1206 15:33:50.711820 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:33:51 crc kubenswrapper[5003]: I1206 15:33:51.711623 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:33:51 crc kubenswrapper[5003]: I1206 15:33:51.711696 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:33:51 crc kubenswrapper[5003]: I1206 15:33:51.713793 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:33:51 crc kubenswrapper[5003]: E1206 15:33:51.713791 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:33:51 crc kubenswrapper[5003]: E1206 15:33:51.713935 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:33:51 crc kubenswrapper[5003]: E1206 15:33:51.714097 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:33:52 crc kubenswrapper[5003]: I1206 15:33:52.712028 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:33:52 crc kubenswrapper[5003]: E1206 15:33:52.722641 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:33:53 crc kubenswrapper[5003]: I1206 15:33:53.711885 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:33:53 crc kubenswrapper[5003]: I1206 15:33:53.711950 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:33:53 crc kubenswrapper[5003]: E1206 15:33:53.712003 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:33:53 crc kubenswrapper[5003]: E1206 15:33:53.712060 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:33:53 crc kubenswrapper[5003]: I1206 15:33:53.711954 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:33:53 crc kubenswrapper[5003]: E1206 15:33:53.712313 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:33:54 crc kubenswrapper[5003]: I1206 15:33:54.711946 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:33:54 crc kubenswrapper[5003]: E1206 15:33:54.712153 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:33:55 crc kubenswrapper[5003]: I1206 15:33:55.712127 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:33:55 crc kubenswrapper[5003]: E1206 15:33:55.712280 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:33:55 crc kubenswrapper[5003]: I1206 15:33:55.712134 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:33:55 crc kubenswrapper[5003]: I1206 15:33:55.712130 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:33:55 crc kubenswrapper[5003]: E1206 15:33:55.712472 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:33:55 crc kubenswrapper[5003]: E1206 15:33:55.712590 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:33:56 crc kubenswrapper[5003]: I1206 15:33:56.711637 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:33:56 crc kubenswrapper[5003]: E1206 15:33:56.712051 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:33:57 crc kubenswrapper[5003]: I1206 15:33:57.711936 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:33:57 crc kubenswrapper[5003]: I1206 15:33:57.711939 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:33:57 crc kubenswrapper[5003]: I1206 15:33:57.711945 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:33:57 crc kubenswrapper[5003]: E1206 15:33:57.712255 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:33:57 crc kubenswrapper[5003]: E1206 15:33:57.712358 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:33:57 crc kubenswrapper[5003]: E1206 15:33:57.712467 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:33:58 crc kubenswrapper[5003]: I1206 15:33:58.711848 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:33:58 crc kubenswrapper[5003]: E1206 15:33:58.712111 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:33:59 crc kubenswrapper[5003]: I1206 15:33:59.712429 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:33:59 crc kubenswrapper[5003]: I1206 15:33:59.712438 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:33:59 crc kubenswrapper[5003]: E1206 15:33:59.712672 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:33:59 crc kubenswrapper[5003]: I1206 15:33:59.712746 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:33:59 crc kubenswrapper[5003]: E1206 15:33:59.713191 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:33:59 crc kubenswrapper[5003]: E1206 15:33:59.713608 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:34:00 crc kubenswrapper[5003]: I1206 15:34:00.711717 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:34:00 crc kubenswrapper[5003]: E1206 15:34:00.711877 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:34:01 crc kubenswrapper[5003]: E1206 15:34:01.683830 5003 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 06 15:34:01 crc kubenswrapper[5003]: I1206 15:34:01.711878 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:34:01 crc kubenswrapper[5003]: I1206 15:34:01.711748 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:34:01 crc kubenswrapper[5003]: E1206 15:34:01.714989 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:34:01 crc kubenswrapper[5003]: I1206 15:34:01.715382 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:34:01 crc kubenswrapper[5003]: E1206 15:34:01.715589 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:34:01 crc kubenswrapper[5003]: E1206 15:34:01.716652 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:34:01 crc kubenswrapper[5003]: I1206 15:34:01.718164 5003 scope.go:117] "RemoveContainer" containerID="9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22" Dec 06 15:34:01 crc kubenswrapper[5003]: E1206 15:34:01.721749 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-p7xwd_openshift-ovn-kubernetes(8a695d94-271c-45bc-8a89-dfdecb57ec00)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" Dec 06 15:34:01 crc kubenswrapper[5003]: E1206 15:34:01.916943 5003 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 06 15:34:02 crc kubenswrapper[5003]: I1206 15:34:02.329048 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-9kdpn_350e8b9a-b7bf-4dc9-abe9-d10f7a088be3/kube-multus/1.log" Dec 06 15:34:02 crc kubenswrapper[5003]: I1206 15:34:02.329868 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-9kdpn_350e8b9a-b7bf-4dc9-abe9-d10f7a088be3/kube-multus/0.log" Dec 06 15:34:02 crc kubenswrapper[5003]: I1206 15:34:02.329956 5003 generic.go:334] "Generic (PLEG): container finished" podID="350e8b9a-b7bf-4dc9-abe9-d10f7a088be3" containerID="22d16feb3425c5cac7562c4468723b0aae567d2d31db5516b3b0ce7d38d91c6b" exitCode=1 Dec 06 15:34:02 crc kubenswrapper[5003]: I1206 15:34:02.330004 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-9kdpn" event={"ID":"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3","Type":"ContainerDied","Data":"22d16feb3425c5cac7562c4468723b0aae567d2d31db5516b3b0ce7d38d91c6b"} Dec 06 15:34:02 crc kubenswrapper[5003]: I1206 15:34:02.330056 5003 scope.go:117] "RemoveContainer" containerID="e9b32883e20da5b298374f7eebd0a21c5ca51cca23543fc78dfe3edb04e3b661" Dec 06 15:34:02 crc kubenswrapper[5003]: I1206 15:34:02.330824 5003 scope.go:117] "RemoveContainer" containerID="22d16feb3425c5cac7562c4468723b0aae567d2d31db5516b3b0ce7d38d91c6b" Dec 06 15:34:02 crc kubenswrapper[5003]: E1206 15:34:02.331157 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-9kdpn_openshift-multus(350e8b9a-b7bf-4dc9-abe9-d10f7a088be3)\"" pod="openshift-multus/multus-9kdpn" podUID="350e8b9a-b7bf-4dc9-abe9-d10f7a088be3" Dec 06 15:34:02 crc kubenswrapper[5003]: I1206 15:34:02.711705 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:34:02 crc kubenswrapper[5003]: E1206 15:34:02.712128 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:34:03 crc kubenswrapper[5003]: I1206 15:34:03.335325 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-9kdpn_350e8b9a-b7bf-4dc9-abe9-d10f7a088be3/kube-multus/1.log" Dec 06 15:34:03 crc kubenswrapper[5003]: I1206 15:34:03.711235 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:34:03 crc kubenswrapper[5003]: E1206 15:34:03.711359 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:34:03 crc kubenswrapper[5003]: I1206 15:34:03.711407 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:34:03 crc kubenswrapper[5003]: E1206 15:34:03.711456 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:34:03 crc kubenswrapper[5003]: I1206 15:34:03.711906 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:34:03 crc kubenswrapper[5003]: E1206 15:34:03.712166 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:34:04 crc kubenswrapper[5003]: I1206 15:34:04.711333 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:34:04 crc kubenswrapper[5003]: E1206 15:34:04.711564 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:34:05 crc kubenswrapper[5003]: I1206 15:34:05.712177 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:34:05 crc kubenswrapper[5003]: I1206 15:34:05.712208 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:34:05 crc kubenswrapper[5003]: E1206 15:34:05.712409 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:34:05 crc kubenswrapper[5003]: I1206 15:34:05.712464 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:34:05 crc kubenswrapper[5003]: E1206 15:34:05.713202 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:34:05 crc kubenswrapper[5003]: E1206 15:34:05.713435 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:34:06 crc kubenswrapper[5003]: I1206 15:34:06.711652 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:34:06 crc kubenswrapper[5003]: E1206 15:34:06.711789 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:34:06 crc kubenswrapper[5003]: E1206 15:34:06.918216 5003 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 06 15:34:07 crc kubenswrapper[5003]: I1206 15:34:07.711528 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:34:07 crc kubenswrapper[5003]: I1206 15:34:07.711601 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:34:07 crc kubenswrapper[5003]: I1206 15:34:07.711535 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:34:07 crc kubenswrapper[5003]: E1206 15:34:07.711649 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:34:07 crc kubenswrapper[5003]: E1206 15:34:07.711717 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:34:07 crc kubenswrapper[5003]: E1206 15:34:07.711858 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:34:08 crc kubenswrapper[5003]: I1206 15:34:08.711604 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:34:08 crc kubenswrapper[5003]: E1206 15:34:08.711787 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:34:09 crc kubenswrapper[5003]: I1206 15:34:09.712174 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:34:09 crc kubenswrapper[5003]: I1206 15:34:09.712207 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:34:09 crc kubenswrapper[5003]: E1206 15:34:09.712347 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:34:09 crc kubenswrapper[5003]: I1206 15:34:09.712369 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:34:09 crc kubenswrapper[5003]: E1206 15:34:09.712466 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:34:09 crc kubenswrapper[5003]: E1206 15:34:09.712710 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:34:10 crc kubenswrapper[5003]: I1206 15:34:10.712273 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:34:10 crc kubenswrapper[5003]: E1206 15:34:10.712409 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:34:11 crc kubenswrapper[5003]: I1206 15:34:11.711361 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:34:11 crc kubenswrapper[5003]: I1206 15:34:11.711427 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:34:11 crc kubenswrapper[5003]: E1206 15:34:11.713421 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:34:11 crc kubenswrapper[5003]: I1206 15:34:11.714086 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:34:11 crc kubenswrapper[5003]: E1206 15:34:11.715162 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:34:11 crc kubenswrapper[5003]: E1206 15:34:11.715425 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:34:11 crc kubenswrapper[5003]: E1206 15:34:11.918984 5003 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 06 15:34:12 crc kubenswrapper[5003]: I1206 15:34:12.711619 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:34:12 crc kubenswrapper[5003]: E1206 15:34:12.712049 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:34:13 crc kubenswrapper[5003]: I1206 15:34:13.712364 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:34:13 crc kubenswrapper[5003]: E1206 15:34:13.713582 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:34:13 crc kubenswrapper[5003]: I1206 15:34:13.713081 5003 scope.go:117] "RemoveContainer" containerID="22d16feb3425c5cac7562c4468723b0aae567d2d31db5516b3b0ce7d38d91c6b" Dec 06 15:34:13 crc kubenswrapper[5003]: I1206 15:34:13.713109 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:34:13 crc kubenswrapper[5003]: I1206 15:34:13.712447 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:34:13 crc kubenswrapper[5003]: E1206 15:34:13.714278 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:34:13 crc kubenswrapper[5003]: E1206 15:34:13.714419 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:34:14 crc kubenswrapper[5003]: I1206 15:34:14.791635 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:34:14 crc kubenswrapper[5003]: I1206 15:34:14.791643 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:34:14 crc kubenswrapper[5003]: E1206 15:34:14.791819 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:34:14 crc kubenswrapper[5003]: I1206 15:34:14.791650 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:34:14 crc kubenswrapper[5003]: E1206 15:34:14.791978 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:34:14 crc kubenswrapper[5003]: E1206 15:34:14.792050 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:34:15 crc kubenswrapper[5003]: I1206 15:34:15.377889 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-9kdpn_350e8b9a-b7bf-4dc9-abe9-d10f7a088be3/kube-multus/1.log" Dec 06 15:34:15 crc kubenswrapper[5003]: I1206 15:34:15.378007 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-9kdpn" event={"ID":"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3","Type":"ContainerStarted","Data":"354d0d34ffc16523138a9bc18e8f39bb69a613fb17554414555a0bd218e7a17d"} Dec 06 15:34:15 crc kubenswrapper[5003]: I1206 15:34:15.712526 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:34:15 crc kubenswrapper[5003]: E1206 15:34:15.712648 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:34:15 crc kubenswrapper[5003]: I1206 15:34:15.713842 5003 scope.go:117] "RemoveContainer" containerID="9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22" Dec 06 15:34:16 crc kubenswrapper[5003]: I1206 15:34:16.387139 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7xwd_8a695d94-271c-45bc-8a89-dfdecb57ec00/ovnkube-controller/3.log" Dec 06 15:34:16 crc kubenswrapper[5003]: I1206 15:34:16.391232 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" event={"ID":"8a695d94-271c-45bc-8a89-dfdecb57ec00","Type":"ContainerStarted","Data":"6a2972bc7291d2adcafac608a303008342fc7672795ad47464e77f3413f5822b"} Dec 06 15:34:16 crc kubenswrapper[5003]: I1206 15:34:16.391829 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:34:16 crc kubenswrapper[5003]: I1206 15:34:16.436352 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" podStartSLOduration=110.436331498 podStartE2EDuration="1m50.436331498s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:16.432756292 +0000 UTC m=+134.966110683" watchObservedRunningTime="2025-12-06 15:34:16.436331498 +0000 UTC m=+134.969685889" Dec 06 15:34:16 crc kubenswrapper[5003]: I1206 15:34:16.483745 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-jmzd9"] Dec 06 15:34:16 crc kubenswrapper[5003]: I1206 15:34:16.483875 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:34:16 crc kubenswrapper[5003]: E1206 15:34:16.483999 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:34:16 crc kubenswrapper[5003]: I1206 15:34:16.712190 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:34:16 crc kubenswrapper[5003]: I1206 15:34:16.712245 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:34:16 crc kubenswrapper[5003]: E1206 15:34:16.712334 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:34:16 crc kubenswrapper[5003]: E1206 15:34:16.712480 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:34:16 crc kubenswrapper[5003]: E1206 15:34:16.920045 5003 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 06 15:34:17 crc kubenswrapper[5003]: I1206 15:34:17.711431 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:34:17 crc kubenswrapper[5003]: E1206 15:34:17.711575 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:34:18 crc kubenswrapper[5003]: I1206 15:34:18.711737 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:34:18 crc kubenswrapper[5003]: I1206 15:34:18.711814 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:34:18 crc kubenswrapper[5003]: I1206 15:34:18.711751 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:34:18 crc kubenswrapper[5003]: E1206 15:34:18.711949 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:34:18 crc kubenswrapper[5003]: E1206 15:34:18.712121 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:34:18 crc kubenswrapper[5003]: E1206 15:34:18.712254 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:34:19 crc kubenswrapper[5003]: I1206 15:34:19.712035 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:34:19 crc kubenswrapper[5003]: E1206 15:34:19.712272 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:34:20 crc kubenswrapper[5003]: I1206 15:34:20.711343 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:34:20 crc kubenswrapper[5003]: E1206 15:34:20.711528 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 15:34:20 crc kubenswrapper[5003]: I1206 15:34:20.711377 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:34:20 crc kubenswrapper[5003]: E1206 15:34:20.711632 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jmzd9" podUID="9fa121e1-7f2f-4912-945f-86cb199c3014" Dec 06 15:34:20 crc kubenswrapper[5003]: I1206 15:34:20.711374 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:34:20 crc kubenswrapper[5003]: E1206 15:34:20.711770 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 15:34:21 crc kubenswrapper[5003]: I1206 15:34:21.714010 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:34:21 crc kubenswrapper[5003]: E1206 15:34:21.715610 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 15:34:22 crc kubenswrapper[5003]: I1206 15:34:22.711954 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:34:22 crc kubenswrapper[5003]: I1206 15:34:22.711990 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:34:22 crc kubenswrapper[5003]: I1206 15:34:22.712069 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:34:22 crc kubenswrapper[5003]: I1206 15:34:22.714962 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 06 15:34:22 crc kubenswrapper[5003]: I1206 15:34:22.714999 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 06 15:34:22 crc kubenswrapper[5003]: I1206 15:34:22.715095 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 06 15:34:22 crc kubenswrapper[5003]: I1206 15:34:22.715142 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 06 15:34:22 crc kubenswrapper[5003]: I1206 15:34:22.715149 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 06 15:34:22 crc kubenswrapper[5003]: I1206 15:34:22.715219 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 06 15:34:23 crc kubenswrapper[5003]: I1206 15:34:23.711248 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.484647 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.525140 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-jx64p"] Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.526235 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-jx64p" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.533461 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.534066 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.534578 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.534844 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.535287 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-dh4ts"] Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.536231 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-dh4ts" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.537477 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.537864 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.538330 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-78q2b"] Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.538606 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.538796 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.539029 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-78q2b" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.539132 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5"] Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.540522 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.543188 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.543854 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.544925 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-g9t4q"] Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.556450 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-5ww6p"] Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.557298 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g9t4q" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.572381 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.580456 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5ww6p" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.580831 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.581442 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.582015 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.582086 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.582313 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.582412 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.582554 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.582925 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.583124 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.583149 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.583263 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.583309 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.584692 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-92jlq"] Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.585864 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.585940 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-drh5n"] Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.586060 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.586375 5003 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.586568 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-drh5n" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.586580 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.586798 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-92jlq" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.586584 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.586631 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.586726 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.586735 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.587455 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.588105 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-tsg4h"] Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.588849 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.590551 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.591466 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.591576 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.591886 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.592077 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.592253 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.592980 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.593015 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.594217 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.594386 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.594686 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.595030 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.596689 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xzdsc"] Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.597670 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.597750 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.598129 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.598184 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.598285 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xzdsc" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.598418 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.598994 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.599068 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.599167 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.599275 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.599381 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.599465 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.599579 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.599771 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.599795 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.599811 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.599909 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.600011 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.600209 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.600300 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.600373 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.601182 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.601468 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" 
Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.603732 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-bvkqz"] Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.604944 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvkqz" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.609739 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.611664 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-hppvr"] Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.613827 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jwrlg"] Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.614084 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-hppvr" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.614297 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jwrlg" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.635445 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9280dab9-204d-4112-98be-c6809da2ad4e-serving-cert\") pod \"apiserver-76f77b778f-jx64p\" (UID: \"9280dab9-204d-4112-98be-c6809da2ad4e\") " pod="openshift-apiserver/apiserver-76f77b778f-jx64p" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.635544 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/86ec403c-0ca1-43ee-893e-917c87e5e174-serving-cert\") pod \"controller-manager-879f6c89f-78q2b\" (UID: \"86ec403c-0ca1-43ee-893e-917c87e5e174\") " pod="openshift-controller-manager/controller-manager-879f6c89f-78q2b" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.635591 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/9280dab9-204d-4112-98be-c6809da2ad4e-etcd-client\") pod \"apiserver-76f77b778f-jx64p\" (UID: \"9280dab9-204d-4112-98be-c6809da2ad4e\") " pod="openshift-apiserver/apiserver-76f77b778f-jx64p" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.635620 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/9280dab9-204d-4112-98be-c6809da2ad4e-encryption-config\") pod \"apiserver-76f77b778f-jx64p\" (UID: \"9280dab9-204d-4112-98be-c6809da2ad4e\") " pod="openshift-apiserver/apiserver-76f77b778f-jx64p" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.635642 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/58fe25e3-98d7-4725-841b-4bcd2e2f628f-serving-cert\") pod \"apiserver-7bbb656c7d-48sc5\" (UID: \"58fe25e3-98d7-4725-841b-4bcd2e2f628f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 
15:34:27.635667 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rx4sp\" (UniqueName: \"kubernetes.io/projected/58fe25e3-98d7-4725-841b-4bcd2e2f628f-kube-api-access-rx4sp\") pod \"apiserver-7bbb656c7d-48sc5\" (UID: \"58fe25e3-98d7-4725-841b-4bcd2e2f628f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.635691 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9990e306-5ae4-467e-9cc4-0225f9c05fc7-client-ca\") pod \"route-controller-manager-6576b87f9c-g9t4q\" (UID: \"9990e306-5ae4-467e-9cc4-0225f9c05fc7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g9t4q" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.635726 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86ec403c-0ca1-43ee-893e-917c87e5e174-config\") pod \"controller-manager-879f6c89f-78q2b\" (UID: \"86ec403c-0ca1-43ee-893e-917c87e5e174\") " pod="openshift-controller-manager/controller-manager-879f6c89f-78q2b" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.635752 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9280dab9-204d-4112-98be-c6809da2ad4e-trusted-ca-bundle\") pod \"apiserver-76f77b778f-jx64p\" (UID: \"9280dab9-204d-4112-98be-c6809da2ad4e\") " pod="openshift-apiserver/apiserver-76f77b778f-jx64p" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.635776 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/58fe25e3-98d7-4725-841b-4bcd2e2f628f-audit-dir\") pod \"apiserver-7bbb656c7d-48sc5\" (UID: \"58fe25e3-98d7-4725-841b-4bcd2e2f628f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.635801 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/58fe25e3-98d7-4725-841b-4bcd2e2f628f-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-48sc5\" (UID: \"58fe25e3-98d7-4725-841b-4bcd2e2f628f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.635821 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9280dab9-204d-4112-98be-c6809da2ad4e-config\") pod \"apiserver-76f77b778f-jx64p\" (UID: \"9280dab9-204d-4112-98be-c6809da2ad4e\") " pod="openshift-apiserver/apiserver-76f77b778f-jx64p" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.635849 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9990e306-5ae4-467e-9cc4-0225f9c05fc7-config\") pod \"route-controller-manager-6576b87f9c-g9t4q\" (UID: \"9990e306-5ae4-467e-9cc4-0225f9c05fc7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g9t4q" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.635873 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: 
\"kubernetes.io/configmap/58fe25e3-98d7-4725-841b-4bcd2e2f628f-audit-policies\") pod \"apiserver-7bbb656c7d-48sc5\" (UID: \"58fe25e3-98d7-4725-841b-4bcd2e2f628f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.635899 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdn2x\" (UniqueName: \"kubernetes.io/projected/07dcad69-d3a4-40e2-a4d2-e83eb74631d7-kube-api-access-cdn2x\") pod \"machine-api-operator-5694c8668f-dh4ts\" (UID: \"07dcad69-d3a4-40e2-a4d2-e83eb74631d7\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dh4ts" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.635940 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/9280dab9-204d-4112-98be-c6809da2ad4e-audit\") pod \"apiserver-76f77b778f-jx64p\" (UID: \"9280dab9-204d-4112-98be-c6809da2ad4e\") " pod="openshift-apiserver/apiserver-76f77b778f-jx64p" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.635965 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07dcad69-d3a4-40e2-a4d2-e83eb74631d7-config\") pod \"machine-api-operator-5694c8668f-dh4ts\" (UID: \"07dcad69-d3a4-40e2-a4d2-e83eb74631d7\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dh4ts" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.635993 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/86ec403c-0ca1-43ee-893e-917c87e5e174-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-78q2b\" (UID: \"86ec403c-0ca1-43ee-893e-917c87e5e174\") " pod="openshift-controller-manager/controller-manager-879f6c89f-78q2b" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.636018 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9280dab9-204d-4112-98be-c6809da2ad4e-audit-dir\") pod \"apiserver-76f77b778f-jx64p\" (UID: \"9280dab9-204d-4112-98be-c6809da2ad4e\") " pod="openshift-apiserver/apiserver-76f77b778f-jx64p" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.636044 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/07dcad69-d3a4-40e2-a4d2-e83eb74631d7-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-dh4ts\" (UID: \"07dcad69-d3a4-40e2-a4d2-e83eb74631d7\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dh4ts" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.636068 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/58fe25e3-98d7-4725-841b-4bcd2e2f628f-etcd-client\") pod \"apiserver-7bbb656c7d-48sc5\" (UID: \"58fe25e3-98d7-4725-841b-4bcd2e2f628f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.636092 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/9280dab9-204d-4112-98be-c6809da2ad4e-image-import-ca\") pod \"apiserver-76f77b778f-jx64p\" (UID: 
\"9280dab9-204d-4112-98be-c6809da2ad4e\") " pod="openshift-apiserver/apiserver-76f77b778f-jx64p" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.636117 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9990e306-5ae4-467e-9cc4-0225f9c05fc7-serving-cert\") pod \"route-controller-manager-6576b87f9c-g9t4q\" (UID: \"9990e306-5ae4-467e-9cc4-0225f9c05fc7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g9t4q" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.636138 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4b7mh\" (UniqueName: \"kubernetes.io/projected/9280dab9-204d-4112-98be-c6809da2ad4e-kube-api-access-4b7mh\") pod \"apiserver-76f77b778f-jx64p\" (UID: \"9280dab9-204d-4112-98be-c6809da2ad4e\") " pod="openshift-apiserver/apiserver-76f77b778f-jx64p" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.636179 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/07dcad69-d3a4-40e2-a4d2-e83eb74631d7-images\") pod \"machine-api-operator-5694c8668f-dh4ts\" (UID: \"07dcad69-d3a4-40e2-a4d2-e83eb74631d7\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dh4ts" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.636200 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wm9x4\" (UniqueName: \"kubernetes.io/projected/86ec403c-0ca1-43ee-893e-917c87e5e174-kube-api-access-wm9x4\") pod \"controller-manager-879f6c89f-78q2b\" (UID: \"86ec403c-0ca1-43ee-893e-917c87e5e174\") " pod="openshift-controller-manager/controller-manager-879f6c89f-78q2b" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.636223 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/58fe25e3-98d7-4725-841b-4bcd2e2f628f-encryption-config\") pod \"apiserver-7bbb656c7d-48sc5\" (UID: \"58fe25e3-98d7-4725-841b-4bcd2e2f628f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.636248 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/9280dab9-204d-4112-98be-c6809da2ad4e-node-pullsecrets\") pod \"apiserver-76f77b778f-jx64p\" (UID: \"9280dab9-204d-4112-98be-c6809da2ad4e\") " pod="openshift-apiserver/apiserver-76f77b778f-jx64p" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.636271 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/9280dab9-204d-4112-98be-c6809da2ad4e-etcd-serving-ca\") pod \"apiserver-76f77b778f-jx64p\" (UID: \"9280dab9-204d-4112-98be-c6809da2ad4e\") " pod="openshift-apiserver/apiserver-76f77b778f-jx64p" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.636296 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/58fe25e3-98d7-4725-841b-4bcd2e2f628f-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-48sc5\" (UID: \"58fe25e3-98d7-4725-841b-4bcd2e2f628f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5" Dec 06 15:34:27 crc 
kubenswrapper[5003]: I1206 15:34:27.636317 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/86ec403c-0ca1-43ee-893e-917c87e5e174-client-ca\") pod \"controller-manager-879f6c89f-78q2b\" (UID: \"86ec403c-0ca1-43ee-893e-917c87e5e174\") " pod="openshift-controller-manager/controller-manager-879f6c89f-78q2b" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.636345 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s7szd\" (UniqueName: \"kubernetes.io/projected/9990e306-5ae4-467e-9cc4-0225f9c05fc7-kube-api-access-s7szd\") pod \"route-controller-manager-6576b87f9c-g9t4q\" (UID: \"9990e306-5ae4-467e-9cc4-0225f9c05fc7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g9t4q" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.639343 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.639528 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.639694 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.639833 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.639957 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.644453 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-dh4ts"] Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.644526 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-npbcn"] Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.645155 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.647203 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.647366 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.647462 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.647554 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.648274 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vr8wv"] Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.648822 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vr8wv" Dec 06 15:34:27 crc kubenswrapper[5003]: I1206 15:34:27.649465 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-npbcn" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.248938 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.249180 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.249346 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.249566 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.255772 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.256324 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.256387 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.256485 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.256825 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.256825 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.257061 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.257219 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.257436 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/07dcad69-d3a4-40e2-a4d2-e83eb74631d7-images\") pod \"machine-api-operator-5694c8668f-dh4ts\" (UID: \"07dcad69-d3a4-40e2-a4d2-e83eb74631d7\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dh4ts" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.257518 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wm9x4\" (UniqueName: \"kubernetes.io/projected/86ec403c-0ca1-43ee-893e-917c87e5e174-kube-api-access-wm9x4\") pod \"controller-manager-879f6c89f-78q2b\" (UID: \"86ec403c-0ca1-43ee-893e-917c87e5e174\") " pod="openshift-controller-manager/controller-manager-879f6c89f-78q2b" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.257540 5003 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/58fe25e3-98d7-4725-841b-4bcd2e2f628f-encryption-config\") pod \"apiserver-7bbb656c7d-48sc5\" (UID: \"58fe25e3-98d7-4725-841b-4bcd2e2f628f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.257558 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/9280dab9-204d-4112-98be-c6809da2ad4e-node-pullsecrets\") pod \"apiserver-76f77b778f-jx64p\" (UID: \"9280dab9-204d-4112-98be-c6809da2ad4e\") " pod="openshift-apiserver/apiserver-76f77b778f-jx64p" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.257573 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/9280dab9-204d-4112-98be-c6809da2ad4e-etcd-serving-ca\") pod \"apiserver-76f77b778f-jx64p\" (UID: \"9280dab9-204d-4112-98be-c6809da2ad4e\") " pod="openshift-apiserver/apiserver-76f77b778f-jx64p" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.257588 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/58fe25e3-98d7-4725-841b-4bcd2e2f628f-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-48sc5\" (UID: \"58fe25e3-98d7-4725-841b-4bcd2e2f628f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.257604 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/86ec403c-0ca1-43ee-893e-917c87e5e174-client-ca\") pod \"controller-manager-879f6c89f-78q2b\" (UID: \"86ec403c-0ca1-43ee-893e-917c87e5e174\") " pod="openshift-controller-manager/controller-manager-879f6c89f-78q2b" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.257623 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s7szd\" (UniqueName: \"kubernetes.io/projected/9990e306-5ae4-467e-9cc4-0225f9c05fc7-kube-api-access-s7szd\") pod \"route-controller-manager-6576b87f9c-g9t4q\" (UID: \"9990e306-5ae4-467e-9cc4-0225f9c05fc7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g9t4q" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.257647 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9280dab9-204d-4112-98be-c6809da2ad4e-serving-cert\") pod \"apiserver-76f77b778f-jx64p\" (UID: \"9280dab9-204d-4112-98be-c6809da2ad4e\") " pod="openshift-apiserver/apiserver-76f77b778f-jx64p" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.257670 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/86ec403c-0ca1-43ee-893e-917c87e5e174-serving-cert\") pod \"controller-manager-879f6c89f-78q2b\" (UID: \"86ec403c-0ca1-43ee-893e-917c87e5e174\") " pod="openshift-controller-manager/controller-manager-879f6c89f-78q2b" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.257709 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/9280dab9-204d-4112-98be-c6809da2ad4e-etcd-client\") pod \"apiserver-76f77b778f-jx64p\" (UID: \"9280dab9-204d-4112-98be-c6809da2ad4e\") " 
pod="openshift-apiserver/apiserver-76f77b778f-jx64p" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.257724 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/9280dab9-204d-4112-98be-c6809da2ad4e-encryption-config\") pod \"apiserver-76f77b778f-jx64p\" (UID: \"9280dab9-204d-4112-98be-c6809da2ad4e\") " pod="openshift-apiserver/apiserver-76f77b778f-jx64p" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.257826 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.257944 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.258140 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.258253 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.258278 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.258553 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/58fe25e3-98d7-4725-841b-4bcd2e2f628f-serving-cert\") pod \"apiserver-7bbb656c7d-48sc5\" (UID: \"58fe25e3-98d7-4725-841b-4bcd2e2f628f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.258585 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86ec403c-0ca1-43ee-893e-917c87e5e174-config\") pod \"controller-manager-879f6c89f-78q2b\" (UID: \"86ec403c-0ca1-43ee-893e-917c87e5e174\") " pod="openshift-controller-manager/controller-manager-879f6c89f-78q2b" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.258620 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rx4sp\" (UniqueName: \"kubernetes.io/projected/58fe25e3-98d7-4725-841b-4bcd2e2f628f-kube-api-access-rx4sp\") pod \"apiserver-7bbb656c7d-48sc5\" (UID: \"58fe25e3-98d7-4725-841b-4bcd2e2f628f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.258638 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9990e306-5ae4-467e-9cc4-0225f9c05fc7-client-ca\") pod \"route-controller-manager-6576b87f9c-g9t4q\" (UID: \"9990e306-5ae4-467e-9cc4-0225f9c05fc7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g9t4q" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.258666 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9280dab9-204d-4112-98be-c6809da2ad4e-trusted-ca-bundle\") pod \"apiserver-76f77b778f-jx64p\" (UID: \"9280dab9-204d-4112-98be-c6809da2ad4e\") " pod="openshift-apiserver/apiserver-76f77b778f-jx64p" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.258681 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"audit-dir\" (UniqueName: \"kubernetes.io/host-path/58fe25e3-98d7-4725-841b-4bcd2e2f628f-audit-dir\") pod \"apiserver-7bbb656c7d-48sc5\" (UID: \"58fe25e3-98d7-4725-841b-4bcd2e2f628f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.258700 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/58fe25e3-98d7-4725-841b-4bcd2e2f628f-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-48sc5\" (UID: \"58fe25e3-98d7-4725-841b-4bcd2e2f628f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.258718 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9280dab9-204d-4112-98be-c6809da2ad4e-config\") pod \"apiserver-76f77b778f-jx64p\" (UID: \"9280dab9-204d-4112-98be-c6809da2ad4e\") " pod="openshift-apiserver/apiserver-76f77b778f-jx64p" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.258737 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/58fe25e3-98d7-4725-841b-4bcd2e2f628f-audit-policies\") pod \"apiserver-7bbb656c7d-48sc5\" (UID: \"58fe25e3-98d7-4725-841b-4bcd2e2f628f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.258754 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9990e306-5ae4-467e-9cc4-0225f9c05fc7-config\") pod \"route-controller-manager-6576b87f9c-g9t4q\" (UID: \"9990e306-5ae4-467e-9cc4-0225f9c05fc7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g9t4q" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.258775 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdn2x\" (UniqueName: \"kubernetes.io/projected/07dcad69-d3a4-40e2-a4d2-e83eb74631d7-kube-api-access-cdn2x\") pod \"machine-api-operator-5694c8668f-dh4ts\" (UID: \"07dcad69-d3a4-40e2-a4d2-e83eb74631d7\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dh4ts" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.258800 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/9280dab9-204d-4112-98be-c6809da2ad4e-audit\") pod \"apiserver-76f77b778f-jx64p\" (UID: \"9280dab9-204d-4112-98be-c6809da2ad4e\") " pod="openshift-apiserver/apiserver-76f77b778f-jx64p" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.258816 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07dcad69-d3a4-40e2-a4d2-e83eb74631d7-config\") pod \"machine-api-operator-5694c8668f-dh4ts\" (UID: \"07dcad69-d3a4-40e2-a4d2-e83eb74631d7\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dh4ts" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.258835 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/86ec403c-0ca1-43ee-893e-917c87e5e174-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-78q2b\" (UID: \"86ec403c-0ca1-43ee-893e-917c87e5e174\") " pod="openshift-controller-manager/controller-manager-879f6c89f-78q2b" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 
15:34:28.258853 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9280dab9-204d-4112-98be-c6809da2ad4e-audit-dir\") pod \"apiserver-76f77b778f-jx64p\" (UID: \"9280dab9-204d-4112-98be-c6809da2ad4e\") " pod="openshift-apiserver/apiserver-76f77b778f-jx64p" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.258869 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/07dcad69-d3a4-40e2-a4d2-e83eb74631d7-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-dh4ts\" (UID: \"07dcad69-d3a4-40e2-a4d2-e83eb74631d7\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dh4ts" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.258926 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/58fe25e3-98d7-4725-841b-4bcd2e2f628f-etcd-client\") pod \"apiserver-7bbb656c7d-48sc5\" (UID: \"58fe25e3-98d7-4725-841b-4bcd2e2f628f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.258948 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/9280dab9-204d-4112-98be-c6809da2ad4e-image-import-ca\") pod \"apiserver-76f77b778f-jx64p\" (UID: \"9280dab9-204d-4112-98be-c6809da2ad4e\") " pod="openshift-apiserver/apiserver-76f77b778f-jx64p" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.258980 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9990e306-5ae4-467e-9cc4-0225f9c05fc7-serving-cert\") pod \"route-controller-manager-6576b87f9c-g9t4q\" (UID: \"9990e306-5ae4-467e-9cc4-0225f9c05fc7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g9t4q" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.258995 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4b7mh\" (UniqueName: \"kubernetes.io/projected/9280dab9-204d-4112-98be-c6809da2ad4e-kube-api-access-4b7mh\") pod \"apiserver-76f77b778f-jx64p\" (UID: \"9280dab9-204d-4112-98be-c6809da2ad4e\") " pod="openshift-apiserver/apiserver-76f77b778f-jx64p" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.259055 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.259656 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.262147 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/86ec403c-0ca1-43ee-893e-917c87e5e174-client-ca\") pod \"controller-manager-879f6c89f-78q2b\" (UID: \"86ec403c-0ca1-43ee-893e-917c87e5e174\") " pod="openshift-controller-manager/controller-manager-879f6c89f-78q2b" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.263023 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/07dcad69-d3a4-40e2-a4d2-e83eb74631d7-images\") pod \"machine-api-operator-5694c8668f-dh4ts\" (UID: \"07dcad69-d3a4-40e2-a4d2-e83eb74631d7\") " 
pod="openshift-machine-api/machine-api-operator-5694c8668f-dh4ts" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.264127 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.264715 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9990e306-5ae4-467e-9cc4-0225f9c05fc7-client-ca\") pod \"route-controller-manager-6576b87f9c-g9t4q\" (UID: \"9990e306-5ae4-467e-9cc4-0225f9c05fc7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g9t4q" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.265448 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07dcad69-d3a4-40e2-a4d2-e83eb74631d7-config\") pod \"machine-api-operator-5694c8668f-dh4ts\" (UID: \"07dcad69-d3a4-40e2-a4d2-e83eb74631d7\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dh4ts" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.266180 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/58fe25e3-98d7-4725-841b-4bcd2e2f628f-audit-policies\") pod \"apiserver-7bbb656c7d-48sc5\" (UID: \"58fe25e3-98d7-4725-841b-4bcd2e2f628f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.266988 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/58fe25e3-98d7-4725-841b-4bcd2e2f628f-audit-dir\") pod \"apiserver-7bbb656c7d-48sc5\" (UID: \"58fe25e3-98d7-4725-841b-4bcd2e2f628f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.267188 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9990e306-5ae4-467e-9cc4-0225f9c05fc7-config\") pod \"route-controller-manager-6576b87f9c-g9t4q\" (UID: \"9990e306-5ae4-467e-9cc4-0225f9c05fc7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g9t4q" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.268413 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.269879 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/9280dab9-204d-4112-98be-c6809da2ad4e-image-import-ca\") pod \"apiserver-76f77b778f-jx64p\" (UID: \"9280dab9-204d-4112-98be-c6809da2ad4e\") " pod="openshift-apiserver/apiserver-76f77b778f-jx64p" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.270581 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.270652 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9280dab9-204d-4112-98be-c6809da2ad4e-config\") pod \"apiserver-76f77b778f-jx64p\" (UID: \"9280dab9-204d-4112-98be-c6809da2ad4e\") " pod="openshift-apiserver/apiserver-76f77b778f-jx64p" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.271451 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/58fe25e3-98d7-4725-841b-4bcd2e2f628f-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-48sc5\" (UID: \"58fe25e3-98d7-4725-841b-4bcd2e2f628f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.271868 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9280dab9-204d-4112-98be-c6809da2ad4e-serving-cert\") pod \"apiserver-76f77b778f-jx64p\" (UID: \"9280dab9-204d-4112-98be-c6809da2ad4e\") " pod="openshift-apiserver/apiserver-76f77b778f-jx64p" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.273602 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/9280dab9-204d-4112-98be-c6809da2ad4e-node-pullsecrets\") pod \"apiserver-76f77b778f-jx64p\" (UID: \"9280dab9-204d-4112-98be-c6809da2ad4e\") " pod="openshift-apiserver/apiserver-76f77b778f-jx64p" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.275063 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/58fe25e3-98d7-4725-841b-4bcd2e2f628f-etcd-client\") pod \"apiserver-7bbb656c7d-48sc5\" (UID: \"58fe25e3-98d7-4725-841b-4bcd2e2f628f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.277082 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/86ec403c-0ca1-43ee-893e-917c87e5e174-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-78q2b\" (UID: \"86ec403c-0ca1-43ee-893e-917c87e5e174\") " pod="openshift-controller-manager/controller-manager-879f6c89f-78q2b" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.277195 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9280dab9-204d-4112-98be-c6809da2ad4e-audit-dir\") pod \"apiserver-76f77b778f-jx64p\" (UID: \"9280dab9-204d-4112-98be-c6809da2ad4e\") " pod="openshift-apiserver/apiserver-76f77b778f-jx64p" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.279065 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/58fe25e3-98d7-4725-841b-4bcd2e2f628f-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-48sc5\" (UID: \"58fe25e3-98d7-4725-841b-4bcd2e2f628f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.282725 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/07dcad69-d3a4-40e2-a4d2-e83eb74631d7-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-dh4ts\" (UID: \"07dcad69-d3a4-40e2-a4d2-e83eb74631d7\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dh4ts" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.284208 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9280dab9-204d-4112-98be-c6809da2ad4e-trusted-ca-bundle\") pod \"apiserver-76f77b778f-jx64p\" (UID: \"9280dab9-204d-4112-98be-c6809da2ad4e\") " pod="openshift-apiserver/apiserver-76f77b778f-jx64p" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.288457 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/9990e306-5ae4-467e-9cc4-0225f9c05fc7-serving-cert\") pod \"route-controller-manager-6576b87f9c-g9t4q\" (UID: \"9990e306-5ae4-467e-9cc4-0225f9c05fc7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g9t4q" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.296224 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/9280dab9-204d-4112-98be-c6809da2ad4e-audit\") pod \"apiserver-76f77b778f-jx64p\" (UID: \"9280dab9-204d-4112-98be-c6809da2ad4e\") " pod="openshift-apiserver/apiserver-76f77b778f-jx64p" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.296557 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/9280dab9-204d-4112-98be-c6809da2ad4e-encryption-config\") pod \"apiserver-76f77b778f-jx64p\" (UID: \"9280dab9-204d-4112-98be-c6809da2ad4e\") " pod="openshift-apiserver/apiserver-76f77b778f-jx64p" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.300132 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/58fe25e3-98d7-4725-841b-4bcd2e2f628f-serving-cert\") pod \"apiserver-7bbb656c7d-48sc5\" (UID: \"58fe25e3-98d7-4725-841b-4bcd2e2f628f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.300818 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/9280dab9-204d-4112-98be-c6809da2ad4e-etcd-serving-ca\") pod \"apiserver-76f77b778f-jx64p\" (UID: \"9280dab9-204d-4112-98be-c6809da2ad4e\") " pod="openshift-apiserver/apiserver-76f77b778f-jx64p" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.305000 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.305038 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.305321 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.308204 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.316455 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/9280dab9-204d-4112-98be-c6809da2ad4e-etcd-client\") pod \"apiserver-76f77b778f-jx64p\" (UID: \"9280dab9-204d-4112-98be-c6809da2ad4e\") " pod="openshift-apiserver/apiserver-76f77b778f-jx64p" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.316892 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/58fe25e3-98d7-4725-841b-4bcd2e2f628f-encryption-config\") pod \"apiserver-7bbb656c7d-48sc5\" (UID: \"58fe25e3-98d7-4725-841b-4bcd2e2f628f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.317308 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdn2x\" (UniqueName: \"kubernetes.io/projected/07dcad69-d3a4-40e2-a4d2-e83eb74631d7-kube-api-access-cdn2x\") pod 
\"machine-api-operator-5694c8668f-dh4ts\" (UID: \"07dcad69-d3a4-40e2-a4d2-e83eb74631d7\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dh4ts" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.317376 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86ec403c-0ca1-43ee-893e-917c87e5e174-config\") pod \"controller-manager-879f6c89f-78q2b\" (UID: \"86ec403c-0ca1-43ee-893e-917c87e5e174\") " pod="openshift-controller-manager/controller-manager-879f6c89f-78q2b" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.317397 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-xs4nd"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.318426 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-xs4nd" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.319390 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rx4sp\" (UniqueName: \"kubernetes.io/projected/58fe25e3-98d7-4725-841b-4bcd2e2f628f-kube-api-access-rx4sp\") pod \"apiserver-7bbb656c7d-48sc5\" (UID: \"58fe25e3-98d7-4725-841b-4bcd2e2f628f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.319476 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-92jlq"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.320170 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4b7mh\" (UniqueName: \"kubernetes.io/projected/9280dab9-204d-4112-98be-c6809da2ad4e-kube-api-access-4b7mh\") pod \"apiserver-76f77b778f-jx64p\" (UID: \"9280dab9-204d-4112-98be-c6809da2ad4e\") " pod="openshift-apiserver/apiserver-76f77b778f-jx64p" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.320743 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s7szd\" (UniqueName: \"kubernetes.io/projected/9990e306-5ae4-467e-9cc4-0225f9c05fc7-kube-api-access-s7szd\") pod \"route-controller-manager-6576b87f9c-g9t4q\" (UID: \"9990e306-5ae4-467e-9cc4-0225f9c05fc7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g9t4q" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.321462 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.321695 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-g9t4q"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.323118 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wm9x4\" (UniqueName: \"kubernetes.io/projected/86ec403c-0ca1-43ee-893e-917c87e5e174-kube-api-access-wm9x4\") pod \"controller-manager-879f6c89f-78q2b\" (UID: \"86ec403c-0ca1-43ee-893e-917c87e5e174\") " pod="openshift-controller-manager/controller-manager-879f6c89f-78q2b" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.323208 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-78q2b"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.325180 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/86ec403c-0ca1-43ee-893e-917c87e5e174-serving-cert\") pod \"controller-manager-879f6c89f-78q2b\" (UID: \"86ec403c-0ca1-43ee-893e-917c87e5e174\") " pod="openshift-controller-manager/controller-manager-879f6c89f-78q2b" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.327441 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xzdsc"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.327517 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.327533 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-jx64p"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.329473 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-tsg4h"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.332628 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-drh5n"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.332766 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-npbcn"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.332969 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jwrlg"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.336650 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-hppvr"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.342917 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-xs4nd"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.345566 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-bvkqz"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.355701 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vr8wv"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.358930 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-tvwvh"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.359676 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.361775 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-p7hq2"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.364519 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.364804 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.365059 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.365361 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-p7hq2" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.365618 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-94w25"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.369685 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.371155 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.371408 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.372312 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.373965 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.374135 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.375728 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.376328 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.376979 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-94w25" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.381370 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.381617 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.381791 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.387169 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.390061 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-mvtxr"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.390414 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.390502 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41e7b2c5-c9fe-4000-830c-bf3351dd327f-config\") pod \"machine-approver-56656f9798-5ww6p\" (UID: \"41e7b2c5-c9fe-4000-830c-bf3351dd327f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5ww6p" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.390552 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sccq2\" (UniqueName: \"kubernetes.io/projected/e7652f19-206e-401f-8424-e2af50465b27-kube-api-access-sccq2\") pod \"authentication-operator-69f744f599-hppvr\" (UID: \"e7652f19-206e-401f-8424-e2af50465b27\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hppvr" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.390585 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/58dc4be2-81aa-4567-b800-1b77019a7eca-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-vr8wv\" (UID: \"58dc4be2-81aa-4567-b800-1b77019a7eca\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vr8wv" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.390609 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/41e7b2c5-c9fe-4000-830c-bf3351dd327f-auth-proxy-config\") pod \"machine-approver-56656f9798-5ww6p\" (UID: \"41e7b2c5-c9fe-4000-830c-bf3351dd327f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5ww6p" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.390631 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cee02b0a-ce4b-452e-aa00-48c7823c13d0-trusted-ca-bundle\") pod 
\"console-f9d7485db-npbcn\" (UID: \"cee02b0a-ce4b-452e-aa00-48c7823c13d0\") " pod="openshift-console/console-f9d7485db-npbcn" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.390659 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctd7v\" (UniqueName: \"kubernetes.io/projected/58dc4be2-81aa-4567-b800-1b77019a7eca-kube-api-access-ctd7v\") pod \"cluster-image-registry-operator-dc59b4c8b-vr8wv\" (UID: \"58dc4be2-81aa-4567-b800-1b77019a7eca\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vr8wv" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.390920 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cf6c1084-3ac6-4ac5-a15d-7e85f6cf75f5-trusted-ca\") pod \"console-operator-58897d9998-drh5n\" (UID: \"cf6c1084-3ac6-4ac5-a15d-7e85f6cf75f5\") " pod="openshift-console-operator/console-operator-58897d9998-drh5n" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.390947 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-27jp7\" (UniqueName: \"kubernetes.io/projected/41e7b2c5-c9fe-4000-830c-bf3351dd327f-kube-api-access-27jp7\") pod \"machine-approver-56656f9798-5ww6p\" (UID: \"41e7b2c5-c9fe-4000-830c-bf3351dd327f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5ww6p" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.390968 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09de6d60-7a17-4222-b6ea-457b9e58a937-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-92jlq\" (UID: \"09de6d60-7a17-4222-b6ea-457b9e58a937\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-92jlq" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.390998 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e7652f19-206e-401f-8424-e2af50465b27-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-hppvr\" (UID: \"e7652f19-206e-401f-8424-e2af50465b27\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hppvr" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.391026 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-mvtxr" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.391030 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7652f19-206e-401f-8424-e2af50465b27-config\") pod \"authentication-operator-69f744f599-hppvr\" (UID: \"e7652f19-206e-401f-8424-e2af50465b27\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hppvr" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.391071 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/343f69ef-b8b8-459c-95d1-5234344b45e0-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-xzdsc\" (UID: \"343f69ef-b8b8-459c-95d1-5234344b45e0\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xzdsc" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.391092 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2g4n6\" (UniqueName: \"kubernetes.io/projected/0fc961e1-eee3-4fd5-ac99-56b85320740b-kube-api-access-2g4n6\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.391113 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/41e7b2c5-c9fe-4000-830c-bf3351dd327f-machine-approver-tls\") pod \"machine-approver-56656f9798-5ww6p\" (UID: \"41e7b2c5-c9fe-4000-830c-bf3351dd327f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5ww6p" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.391130 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0fc961e1-eee3-4fd5-ac99-56b85320740b-audit-policies\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.391149 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.391173 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7652f19-206e-401f-8424-e2af50465b27-serving-cert\") pod \"authentication-operator-69f744f599-hppvr\" (UID: \"e7652f19-206e-401f-8424-e2af50465b27\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hppvr" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.391192 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fmltp\" (UniqueName: \"kubernetes.io/projected/4f00accf-ea52-4f16-9749-4af762d99a60-kube-api-access-fmltp\") pod 
\"cluster-samples-operator-665b6dd947-jwrlg\" (UID: \"4f00accf-ea52-4f16-9749-4af762d99a60\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jwrlg" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.391231 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5cqd\" (UniqueName: \"kubernetes.io/projected/a48bd0ed-4703-46dd-9586-6141cfe7b15e-kube-api-access-r5cqd\") pod \"openshift-config-operator-7777fb866f-bvkqz\" (UID: \"a48bd0ed-4703-46dd-9586-6141cfe7b15e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvkqz" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.391247 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/cee02b0a-ce4b-452e-aa00-48c7823c13d0-oauth-serving-cert\") pod \"console-f9d7485db-npbcn\" (UID: \"cee02b0a-ce4b-452e-aa00-48c7823c13d0\") " pod="openshift-console/console-f9d7485db-npbcn" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.391272 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09de6d60-7a17-4222-b6ea-457b9e58a937-config\") pod \"openshift-apiserver-operator-796bbdcf4f-92jlq\" (UID: \"09de6d60-7a17-4222-b6ea-457b9e58a937\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-92jlq" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.391337 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/cee02b0a-ce4b-452e-aa00-48c7823c13d0-service-ca\") pod \"console-f9d7485db-npbcn\" (UID: \"cee02b0a-ce4b-452e-aa00-48c7823c13d0\") " pod="openshift-console/console-f9d7485db-npbcn" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.391359 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/cee02b0a-ce4b-452e-aa00-48c7823c13d0-console-oauth-config\") pod \"console-f9d7485db-npbcn\" (UID: \"cee02b0a-ce4b-452e-aa00-48c7823c13d0\") " pod="openshift-console/console-f9d7485db-npbcn" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.391376 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf6c1084-3ac6-4ac5-a15d-7e85f6cf75f5-config\") pod \"console-operator-58897d9998-drh5n\" (UID: \"cf6c1084-3ac6-4ac5-a15d-7e85f6cf75f5\") " pod="openshift-console-operator/console-operator-58897d9998-drh5n" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.391395 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cf6c1084-3ac6-4ac5-a15d-7e85f6cf75f5-serving-cert\") pod \"console-operator-58897d9998-drh5n\" (UID: \"cf6c1084-3ac6-4ac5-a15d-7e85f6cf75f5\") " pod="openshift-console-operator/console-operator-58897d9998-drh5n" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.391416 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.391436 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.391692 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-4qjqw"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.404610 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.404655 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.404790 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.404908 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.392123 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.405057 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-4qjqw" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.405838 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.405958 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.405992 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.406019 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.406047 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/a48bd0ed-4703-46dd-9586-6141cfe7b15e-available-featuregates\") pod \"openshift-config-operator-7777fb866f-bvkqz\" (UID: \"a48bd0ed-4703-46dd-9586-6141cfe7b15e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvkqz" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.406068 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0fc961e1-eee3-4fd5-ac99-56b85320740b-audit-dir\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.406094 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rzl5n\" (UniqueName: \"kubernetes.io/projected/343f69ef-b8b8-459c-95d1-5234344b45e0-kube-api-access-rzl5n\") pod \"openshift-controller-manager-operator-756b6f6bc6-xzdsc\" (UID: \"343f69ef-b8b8-459c-95d1-5234344b45e0\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xzdsc" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.406117 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/4f00accf-ea52-4f16-9749-4af762d99a60-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-jwrlg\" (UID: \"4f00accf-ea52-4f16-9749-4af762d99a60\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jwrlg" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.406157 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/58dc4be2-81aa-4567-b800-1b77019a7eca-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-vr8wv\" (UID: \"58dc4be2-81aa-4567-b800-1b77019a7eca\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vr8wv" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.406179 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/cee02b0a-ce4b-452e-aa00-48c7823c13d0-console-config\") pod \"console-f9d7485db-npbcn\" (UID: \"cee02b0a-ce4b-452e-aa00-48c7823c13d0\") " pod="openshift-console/console-f9d7485db-npbcn" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.406201 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wnwqk\" (UniqueName: \"kubernetes.io/projected/cee02b0a-ce4b-452e-aa00-48c7823c13d0-kube-api-access-wnwqk\") pod \"console-f9d7485db-npbcn\" (UID: \"cee02b0a-ce4b-452e-aa00-48c7823c13d0\") " pod="openshift-console/console-f9d7485db-npbcn" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.406224 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-26wpg\" (UniqueName: \"kubernetes.io/projected/cf6c1084-3ac6-4ac5-a15d-7e85f6cf75f5-kube-api-access-26wpg\") pod \"console-operator-58897d9998-drh5n\" (UID: \"cf6c1084-3ac6-4ac5-a15d-7e85f6cf75f5\") " pod="openshift-console-operator/console-operator-58897d9998-drh5n" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.406246 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/cee02b0a-ce4b-452e-aa00-48c7823c13d0-console-serving-cert\") pod \"console-f9d7485db-npbcn\" (UID: \"cee02b0a-ce4b-452e-aa00-48c7823c13d0\") " pod="openshift-console/console-f9d7485db-npbcn" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.406271 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fl57h\" (UniqueName: \"kubernetes.io/projected/09de6d60-7a17-4222-b6ea-457b9e58a937-kube-api-access-fl57h\") pod \"openshift-apiserver-operator-796bbdcf4f-92jlq\" (UID: \"09de6d60-7a17-4222-b6ea-457b9e58a937\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-92jlq" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.406309 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e7652f19-206e-401f-8424-e2af50465b27-service-ca-bundle\") pod \"authentication-operator-69f744f599-hppvr\" (UID: \"e7652f19-206e-401f-8424-e2af50465b27\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hppvr" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.406333 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" 
(UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.406372 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/58dc4be2-81aa-4567-b800-1b77019a7eca-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-vr8wv\" (UID: \"58dc4be2-81aa-4567-b800-1b77019a7eca\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vr8wv" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.406399 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a48bd0ed-4703-46dd-9586-6141cfe7b15e-serving-cert\") pod \"openshift-config-operator-7777fb866f-bvkqz\" (UID: \"a48bd0ed-4703-46dd-9586-6141cfe7b15e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvkqz" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.406423 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.406447 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/343f69ef-b8b8-459c-95d1-5234344b45e0-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-xzdsc\" (UID: \"343f69ef-b8b8-459c-95d1-5234344b45e0\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xzdsc" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.407338 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.407597 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.407694 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l8t2q"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.408022 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.408633 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rqfnv"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.408064 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.408723 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l8t2q" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.408253 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.409171 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rqfnv" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.411083 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pt2wd"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.411983 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pt2wd" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.412600 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-xvg94"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.413092 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xvg94" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.414276 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mhpsl"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.415815 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mhpsl" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.429804 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.431420 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8fpj2"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.432237 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8fpj2" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.435275 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-gqgvg"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.436344 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-gqgvg" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.436351 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dc5hk"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.437269 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dc5hk" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.437654 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-x6k88"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.438251 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-x6k88" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.438762 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-lwc4r"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.439682 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-lwc4r" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.439883 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-fp6zg"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.440984 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-p7hq2"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.441060 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-fp6zg" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.442154 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2qwm4"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.442980 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2qwm4" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.443065 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-jx64p" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.443231 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-ftnnx"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.443922 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-ftnnx" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.446248 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-rjl56"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.448712 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29417250-pmq24"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.449303 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.450377 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-rjl56" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.451650 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-t2c5q"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.451860 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29417250-pmq24" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.452144 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-rrdrz"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.452278 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-t2c5q" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.452725 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-wx2w9"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.452860 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-rrdrz" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.453607 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-wx2w9" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.455107 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-bk7j8"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.456432 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-7w4jb"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.456723 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-bk7j8" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.456925 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7w4jb" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.460816 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-854zg"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.462015 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-854zg" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.462042 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-tvwvh"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.463696 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-94w25"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.464466 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-lwc4r"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.465890 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-gqgvg"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.467151 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mhpsl"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.468572 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-rjl56"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.469522 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-rrdrz"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.470615 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-mvtxr"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.471820 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-xvg94"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.480841 5003 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8fpj2"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.481952 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l8t2q"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.487199 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rqfnv"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.487752 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-dh4ts" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.488670 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-wx2w9"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.489821 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.490045 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29417250-pmq24"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.492173 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-854zg"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.493454 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-x6k88"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.495756 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-bk7j8"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.496848 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dc5hk"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.499114 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-7w4jb"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.499832 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-78q2b" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.500323 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-t2c5q"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.501439 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2qwm4"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.502542 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pt2wd"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.503677 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-fp6zg"] Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.507442 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7652f19-206e-401f-8424-e2af50465b27-serving-cert\") pod \"authentication-operator-69f744f599-hppvr\" (UID: \"e7652f19-206e-401f-8424-e2af50465b27\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hppvr" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.507479 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fmltp\" (UniqueName: \"kubernetes.io/projected/4f00accf-ea52-4f16-9749-4af762d99a60-kube-api-access-fmltp\") pod \"cluster-samples-operator-665b6dd947-jwrlg\" (UID: \"4f00accf-ea52-4f16-9749-4af762d99a60\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jwrlg" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.507540 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/b4d4786f-591a-43fb-afe1-04c8daa257a7-default-certificate\") pod \"router-default-5444994796-4qjqw\" (UID: \"b4d4786f-591a-43fb-afe1-04c8daa257a7\") " pod="openshift-ingress/router-default-5444994796-4qjqw" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.507623 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5cqd\" (UniqueName: \"kubernetes.io/projected/a48bd0ed-4703-46dd-9586-6141cfe7b15e-kube-api-access-r5cqd\") pod \"openshift-config-operator-7777fb866f-bvkqz\" (UID: \"a48bd0ed-4703-46dd-9586-6141cfe7b15e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvkqz" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.507650 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/cee02b0a-ce4b-452e-aa00-48c7823c13d0-oauth-serving-cert\") pod \"console-f9d7485db-npbcn\" (UID: \"cee02b0a-ce4b-452e-aa00-48c7823c13d0\") " pod="openshift-console/console-f9d7485db-npbcn" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.507672 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09de6d60-7a17-4222-b6ea-457b9e58a937-config\") pod \"openshift-apiserver-operator-796bbdcf4f-92jlq\" (UID: \"09de6d60-7a17-4222-b6ea-457b9e58a937\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-92jlq" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.507695 5003 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/61f3ad0c-2191-4ea5-96ed-763ca80fbcba-metrics-tls\") pod \"ingress-operator-5b745b69d9-94w25\" (UID: \"61f3ad0c-2191-4ea5-96ed-763ca80fbcba\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-94w25" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.507731 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/cee02b0a-ce4b-452e-aa00-48c7823c13d0-service-ca\") pod \"console-f9d7485db-npbcn\" (UID: \"cee02b0a-ce4b-452e-aa00-48c7823c13d0\") " pod="openshift-console/console-f9d7485db-npbcn" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.507753 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf6c1084-3ac6-4ac5-a15d-7e85f6cf75f5-config\") pod \"console-operator-58897d9998-drh5n\" (UID: \"cf6c1084-3ac6-4ac5-a15d-7e85f6cf75f5\") " pod="openshift-console-operator/console-operator-58897d9998-drh5n" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.507776 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cf6c1084-3ac6-4ac5-a15d-7e85f6cf75f5-serving-cert\") pod \"console-operator-58897d9998-drh5n\" (UID: \"cf6c1084-3ac6-4ac5-a15d-7e85f6cf75f5\") " pod="openshift-console-operator/console-operator-58897d9998-drh5n" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.507798 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.507822 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/cee02b0a-ce4b-452e-aa00-48c7823c13d0-console-oauth-config\") pod \"console-f9d7485db-npbcn\" (UID: \"cee02b0a-ce4b-452e-aa00-48c7823c13d0\") " pod="openshift-console/console-f9d7485db-npbcn" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.507843 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.507870 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.507893 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-user-template-login\") pod 
\"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.507915 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/150d06be-be0f-4425-a584-760d19d009b5-etcd-service-ca\") pod \"etcd-operator-b45778765-p7hq2\" (UID: \"150d06be-be0f-4425-a584-760d19d009b5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-p7hq2" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.507936 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.507958 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.507982 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508009 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/a48bd0ed-4703-46dd-9586-6141cfe7b15e-available-featuregates\") pod \"openshift-config-operator-7777fb866f-bvkqz\" (UID: \"a48bd0ed-4703-46dd-9586-6141cfe7b15e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvkqz" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508031 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0fc961e1-eee3-4fd5-ac99-56b85320740b-audit-dir\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508053 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/150d06be-be0f-4425-a584-760d19d009b5-etcd-client\") pod \"etcd-operator-b45778765-p7hq2\" (UID: \"150d06be-be0f-4425-a584-760d19d009b5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-p7hq2" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508076 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rzl5n\" (UniqueName: \"kubernetes.io/projected/343f69ef-b8b8-459c-95d1-5234344b45e0-kube-api-access-rzl5n\") pod \"openshift-controller-manager-operator-756b6f6bc6-xzdsc\" (UID: 
\"343f69ef-b8b8-459c-95d1-5234344b45e0\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xzdsc" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508098 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/4f00accf-ea52-4f16-9749-4af762d99a60-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-jwrlg\" (UID: \"4f00accf-ea52-4f16-9749-4af762d99a60\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jwrlg" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508118 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/150d06be-be0f-4425-a584-760d19d009b5-config\") pod \"etcd-operator-b45778765-p7hq2\" (UID: \"150d06be-be0f-4425-a584-760d19d009b5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-p7hq2" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508150 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/cee02b0a-ce4b-452e-aa00-48c7823c13d0-console-config\") pod \"console-f9d7485db-npbcn\" (UID: \"cee02b0a-ce4b-452e-aa00-48c7823c13d0\") " pod="openshift-console/console-f9d7485db-npbcn" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508189 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wnwqk\" (UniqueName: \"kubernetes.io/projected/cee02b0a-ce4b-452e-aa00-48c7823c13d0-kube-api-access-wnwqk\") pod \"console-f9d7485db-npbcn\" (UID: \"cee02b0a-ce4b-452e-aa00-48c7823c13d0\") " pod="openshift-console/console-f9d7485db-npbcn" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508215 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-26wpg\" (UniqueName: \"kubernetes.io/projected/cf6c1084-3ac6-4ac5-a15d-7e85f6cf75f5-kube-api-access-26wpg\") pod \"console-operator-58897d9998-drh5n\" (UID: \"cf6c1084-3ac6-4ac5-a15d-7e85f6cf75f5\") " pod="openshift-console-operator/console-operator-58897d9998-drh5n" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508240 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/58dc4be2-81aa-4567-b800-1b77019a7eca-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-vr8wv\" (UID: \"58dc4be2-81aa-4567-b800-1b77019a7eca\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vr8wv" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508261 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/cee02b0a-ce4b-452e-aa00-48c7823c13d0-console-serving-cert\") pod \"console-f9d7485db-npbcn\" (UID: \"cee02b0a-ce4b-452e-aa00-48c7823c13d0\") " pod="openshift-console/console-f9d7485db-npbcn" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508319 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fl57h\" (UniqueName: \"kubernetes.io/projected/09de6d60-7a17-4222-b6ea-457b9e58a937-kube-api-access-fl57h\") pod \"openshift-apiserver-operator-796bbdcf4f-92jlq\" (UID: \"09de6d60-7a17-4222-b6ea-457b9e58a937\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-92jlq" Dec 06 15:34:28 crc kubenswrapper[5003]: 
I1206 15:34:28.508345 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e7652f19-206e-401f-8424-e2af50465b27-service-ca-bundle\") pod \"authentication-operator-69f744f599-hppvr\" (UID: \"e7652f19-206e-401f-8424-e2af50465b27\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hppvr" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508368 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508390 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/150d06be-be0f-4425-a584-760d19d009b5-serving-cert\") pod \"etcd-operator-b45778765-p7hq2\" (UID: \"150d06be-be0f-4425-a584-760d19d009b5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-p7hq2" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508413 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/58dc4be2-81aa-4567-b800-1b77019a7eca-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-vr8wv\" (UID: \"58dc4be2-81aa-4567-b800-1b77019a7eca\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vr8wv" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508438 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/61f3ad0c-2191-4ea5-96ed-763ca80fbcba-bound-sa-token\") pod \"ingress-operator-5b745b69d9-94w25\" (UID: \"61f3ad0c-2191-4ea5-96ed-763ca80fbcba\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-94w25" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508461 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508500 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/343f69ef-b8b8-459c-95d1-5234344b45e0-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-xzdsc\" (UID: \"343f69ef-b8b8-459c-95d1-5234344b45e0\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xzdsc" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508525 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a48bd0ed-4703-46dd-9586-6141cfe7b15e-serving-cert\") pod \"openshift-config-operator-7777fb866f-bvkqz\" (UID: \"a48bd0ed-4703-46dd-9586-6141cfe7b15e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvkqz" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508546 
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508546 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508568 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzvcm\" (UniqueName: \"kubernetes.io/projected/61f3ad0c-2191-4ea5-96ed-763ca80fbcba-kube-api-access-nzvcm\") pod \"ingress-operator-5b745b69d9-94w25\" (UID: \"61f3ad0c-2191-4ea5-96ed-763ca80fbcba\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-94w25"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508590 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41e7b2c5-c9fe-4000-830c-bf3351dd327f-config\") pod \"machine-approver-56656f9798-5ww6p\" (UID: \"41e7b2c5-c9fe-4000-830c-bf3351dd327f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5ww6p"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508611 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cwk6j\" (UniqueName: \"kubernetes.io/projected/b4d4786f-591a-43fb-afe1-04c8daa257a7-kube-api-access-cwk6j\") pod \"router-default-5444994796-4qjqw\" (UID: \"b4d4786f-591a-43fb-afe1-04c8daa257a7\") " pod="openshift-ingress/router-default-5444994796-4qjqw"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508619 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508637 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cee02b0a-ce4b-452e-aa00-48c7823c13d0-trusted-ca-bundle\") pod \"console-f9d7485db-npbcn\" (UID: \"cee02b0a-ce4b-452e-aa00-48c7823c13d0\") " pod="openshift-console/console-f9d7485db-npbcn"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508659 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b4d4786f-591a-43fb-afe1-04c8daa257a7-service-ca-bundle\") pod \"router-default-5444994796-4qjqw\" (UID: \"b4d4786f-591a-43fb-afe1-04c8daa257a7\") " pod="openshift-ingress/router-default-5444994796-4qjqw"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508684 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sccq2\" (UniqueName: \"kubernetes.io/projected/e7652f19-206e-401f-8424-e2af50465b27-kube-api-access-sccq2\") pod \"authentication-operator-69f744f599-hppvr\" (UID: \"e7652f19-206e-401f-8424-e2af50465b27\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hppvr"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508707 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/58dc4be2-81aa-4567-b800-1b77019a7eca-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-vr8wv\" (UID: \"58dc4be2-81aa-4567-b800-1b77019a7eca\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vr8wv"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508729 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/41e7b2c5-c9fe-4000-830c-bf3351dd327f-auth-proxy-config\") pod \"machine-approver-56656f9798-5ww6p\" (UID: \"41e7b2c5-c9fe-4000-830c-bf3351dd327f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5ww6p"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508761 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctd7v\" (UniqueName: \"kubernetes.io/projected/58dc4be2-81aa-4567-b800-1b77019a7eca-kube-api-access-ctd7v\") pod \"cluster-image-registry-operator-dc59b4c8b-vr8wv\" (UID: \"58dc4be2-81aa-4567-b800-1b77019a7eca\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vr8wv"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508785 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/150d06be-be0f-4425-a584-760d19d009b5-etcd-ca\") pod \"etcd-operator-b45778765-p7hq2\" (UID: \"150d06be-be0f-4425-a584-760d19d009b5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-p7hq2"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508809 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cf6c1084-3ac6-4ac5-a15d-7e85f6cf75f5-trusted-ca\") pod \"console-operator-58897d9998-drh5n\" (UID: \"cf6c1084-3ac6-4ac5-a15d-7e85f6cf75f5\") " pod="openshift-console-operator/console-operator-58897d9998-drh5n"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508832 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qkg9\" (UniqueName: \"kubernetes.io/projected/d7c10f1a-b1ca-4c58-882a-f5d834b31b5a-kube-api-access-6qkg9\") pod \"dns-operator-744455d44c-mvtxr\" (UID: \"d7c10f1a-b1ca-4c58-882a-f5d834b31b5a\") " pod="openshift-dns-operator/dns-operator-744455d44c-mvtxr"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508855 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b4d4786f-591a-43fb-afe1-04c8daa257a7-metrics-certs\") pod \"router-default-5444994796-4qjqw\" (UID: \"b4d4786f-591a-43fb-afe1-04c8daa257a7\") " pod="openshift-ingress/router-default-5444994796-4qjqw"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508878 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jswnm\" (UniqueName: \"kubernetes.io/projected/495babf4-9201-4523-8a21-44e001d4f4c1-kube-api-access-jswnm\") pod \"downloads-7954f5f757-xs4nd\" (UID: \"495babf4-9201-4523-8a21-44e001d4f4c1\") " pod="openshift-console/downloads-7954f5f757-xs4nd"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508900 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d7c10f1a-b1ca-4c58-882a-f5d834b31b5a-metrics-tls\") pod \"dns-operator-744455d44c-mvtxr\" (UID: \"d7c10f1a-b1ca-4c58-882a-f5d834b31b5a\") " pod="openshift-dns-operator/dns-operator-744455d44c-mvtxr"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508928 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e7652f19-206e-401f-8424-e2af50465b27-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-hppvr\" (UID: \"e7652f19-206e-401f-8424-e2af50465b27\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hppvr"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508952 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-27jp7\" (UniqueName: \"kubernetes.io/projected/41e7b2c5-c9fe-4000-830c-bf3351dd327f-kube-api-access-27jp7\") pod \"machine-approver-56656f9798-5ww6p\" (UID: \"41e7b2c5-c9fe-4000-830c-bf3351dd327f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5ww6p"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.508974 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09de6d60-7a17-4222-b6ea-457b9e58a937-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-92jlq\" (UID: \"09de6d60-7a17-4222-b6ea-457b9e58a937\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-92jlq"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.509001 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86b74\" (UniqueName: \"kubernetes.io/projected/150d06be-be0f-4425-a584-760d19d009b5-kube-api-access-86b74\") pod \"etcd-operator-b45778765-p7hq2\" (UID: \"150d06be-be0f-4425-a584-760d19d009b5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-p7hq2"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.509029 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/61f3ad0c-2191-4ea5-96ed-763ca80fbcba-trusted-ca\") pod \"ingress-operator-5b745b69d9-94w25\" (UID: \"61f3ad0c-2191-4ea5-96ed-763ca80fbcba\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-94w25"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.509055 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7652f19-206e-401f-8424-e2af50465b27-config\") pod \"authentication-operator-69f744f599-hppvr\" (UID: \"e7652f19-206e-401f-8424-e2af50465b27\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hppvr"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.509077 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/b4d4786f-591a-43fb-afe1-04c8daa257a7-stats-auth\") pod \"router-default-5444994796-4qjqw\" (UID: \"b4d4786f-591a-43fb-afe1-04c8daa257a7\") " pod="openshift-ingress/router-default-5444994796-4qjqw"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.509100 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/343f69ef-b8b8-459c-95d1-5234344b45e0-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-xzdsc\" (UID: \"343f69ef-b8b8-459c-95d1-5234344b45e0\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xzdsc"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.509127 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2g4n6\" (UniqueName: \"kubernetes.io/projected/0fc961e1-eee3-4fd5-ac99-56b85320740b-kube-api-access-2g4n6\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.509153 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/41e7b2c5-c9fe-4000-830c-bf3351dd327f-machine-approver-tls\") pod \"machine-approver-56656f9798-5ww6p\" (UID: \"41e7b2c5-c9fe-4000-830c-bf3351dd327f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5ww6p"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.509177 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0fc961e1-eee3-4fd5-ac99-56b85320740b-audit-policies\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.509199 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.509946 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.511290 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/cee02b0a-ce4b-452e-aa00-48c7823c13d0-console-config\") pod \"console-f9d7485db-npbcn\" (UID: \"cee02b0a-ce4b-452e-aa00-48c7823c13d0\") " pod="openshift-console/console-f9d7485db-npbcn"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.511406 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e7652f19-206e-401f-8424-e2af50465b27-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-hppvr\" (UID: \"e7652f19-206e-401f-8424-e2af50465b27\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hppvr"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.512232 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cf6c1084-3ac6-4ac5-a15d-7e85f6cf75f5-trusted-ca\") pod \"console-operator-58897d9998-drh5n\" (UID: \"cf6c1084-3ac6-4ac5-a15d-7e85f6cf75f5\") " pod="openshift-console-operator/console-operator-58897d9998-drh5n"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.512505 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/58dc4be2-81aa-4567-b800-1b77019a7eca-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-vr8wv\" (UID: \"58dc4be2-81aa-4567-b800-1b77019a7eca\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vr8wv"
pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vr8wv" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.512921 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/41e7b2c5-c9fe-4000-830c-bf3351dd327f-auth-proxy-config\") pod \"machine-approver-56656f9798-5ww6p\" (UID: \"41e7b2c5-c9fe-4000-830c-bf3351dd327f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5ww6p" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.513826 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.514305 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/cee02b0a-ce4b-452e-aa00-48c7823c13d0-oauth-serving-cert\") pod \"console-f9d7485db-npbcn\" (UID: \"cee02b0a-ce4b-452e-aa00-48c7823c13d0\") " pod="openshift-console/console-f9d7485db-npbcn" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.514658 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0fc961e1-eee3-4fd5-ac99-56b85320740b-audit-dir\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.515150 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/cee02b0a-ce4b-452e-aa00-48c7823c13d0-console-oauth-config\") pod \"console-f9d7485db-npbcn\" (UID: \"cee02b0a-ce4b-452e-aa00-48c7823c13d0\") " pod="openshift-console/console-f9d7485db-npbcn" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.515359 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.515566 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7652f19-206e-401f-8424-e2af50465b27-config\") pod \"authentication-operator-69f744f599-hppvr\" (UID: \"e7652f19-206e-401f-8424-e2af50465b27\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hppvr" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.516354 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e7652f19-206e-401f-8424-e2af50465b27-service-ca-bundle\") pod \"authentication-operator-69f744f599-hppvr\" (UID: \"e7652f19-206e-401f-8424-e2af50465b27\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hppvr" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.516924 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41e7b2c5-c9fe-4000-830c-bf3351dd327f-config\") pod \"machine-approver-56656f9798-5ww6p\" (UID: \"41e7b2c5-c9fe-4000-830c-bf3351dd327f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5ww6p" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.517211 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/cee02b0a-ce4b-452e-aa00-48c7823c13d0-service-ca\") pod \"console-f9d7485db-npbcn\" (UID: \"cee02b0a-ce4b-452e-aa00-48c7823c13d0\") " pod="openshift-console/console-f9d7485db-npbcn" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.517390 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cee02b0a-ce4b-452e-aa00-48c7823c13d0-trusted-ca-bundle\") pod \"console-f9d7485db-npbcn\" (UID: \"cee02b0a-ce4b-452e-aa00-48c7823c13d0\") " pod="openshift-console/console-f9d7485db-npbcn" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.517881 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf6c1084-3ac6-4ac5-a15d-7e85f6cf75f5-config\") pod \"console-operator-58897d9998-drh5n\" (UID: \"cf6c1084-3ac6-4ac5-a15d-7e85f6cf75f5\") " pod="openshift-console-operator/console-operator-58897d9998-drh5n" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.518047 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09de6d60-7a17-4222-b6ea-457b9e58a937-config\") pod \"openshift-apiserver-operator-796bbdcf4f-92jlq\" (UID: \"09de6d60-7a17-4222-b6ea-457b9e58a937\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-92jlq" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.559869 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g9t4q" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.612358 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d7c10f1a-b1ca-4c58-882a-f5d834b31b5a-metrics-tls\") pod \"dns-operator-744455d44c-mvtxr\" (UID: \"d7c10f1a-b1ca-4c58-882a-f5d834b31b5a\") " pod="openshift-dns-operator/dns-operator-744455d44c-mvtxr" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.612405 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86b74\" (UniqueName: \"kubernetes.io/projected/150d06be-be0f-4425-a584-760d19d009b5-kube-api-access-86b74\") pod \"etcd-operator-b45778765-p7hq2\" (UID: \"150d06be-be0f-4425-a584-760d19d009b5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-p7hq2" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.612422 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b4d4786f-591a-43fb-afe1-04c8daa257a7-metrics-certs\") pod \"router-default-5444994796-4qjqw\" (UID: \"b4d4786f-591a-43fb-afe1-04c8daa257a7\") " pod="openshift-ingress/router-default-5444994796-4qjqw" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.612438 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jswnm\" (UniqueName: \"kubernetes.io/projected/495babf4-9201-4523-8a21-44e001d4f4c1-kube-api-access-jswnm\") pod \"downloads-7954f5f757-xs4nd\" (UID: \"495babf4-9201-4523-8a21-44e001d4f4c1\") " pod="openshift-console/downloads-7954f5f757-xs4nd" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.612455 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/61f3ad0c-2191-4ea5-96ed-763ca80fbcba-trusted-ca\") pod \"ingress-operator-5b745b69d9-94w25\" (UID: \"61f3ad0c-2191-4ea5-96ed-763ca80fbcba\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-94w25" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.612469 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/b4d4786f-591a-43fb-afe1-04c8daa257a7-stats-auth\") pod \"router-default-5444994796-4qjqw\" (UID: \"b4d4786f-591a-43fb-afe1-04c8daa257a7\") " pod="openshift-ingress/router-default-5444994796-4qjqw" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.612537 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/b4d4786f-591a-43fb-afe1-04c8daa257a7-default-certificate\") pod \"router-default-5444994796-4qjqw\" (UID: \"b4d4786f-591a-43fb-afe1-04c8daa257a7\") " pod="openshift-ingress/router-default-5444994796-4qjqw" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.612574 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/61f3ad0c-2191-4ea5-96ed-763ca80fbcba-metrics-tls\") pod \"ingress-operator-5b745b69d9-94w25\" (UID: \"61f3ad0c-2191-4ea5-96ed-763ca80fbcba\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-94w25" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.612636 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/150d06be-be0f-4425-a584-760d19d009b5-etcd-service-ca\") pod \"etcd-operator-b45778765-p7hq2\" (UID: \"150d06be-be0f-4425-a584-760d19d009b5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-p7hq2" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.612675 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/150d06be-be0f-4425-a584-760d19d009b5-etcd-client\") pod \"etcd-operator-b45778765-p7hq2\" (UID: \"150d06be-be0f-4425-a584-760d19d009b5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-p7hq2" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.612690 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/150d06be-be0f-4425-a584-760d19d009b5-config\") pod \"etcd-operator-b45778765-p7hq2\" (UID: \"150d06be-be0f-4425-a584-760d19d009b5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-p7hq2" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.612755 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/150d06be-be0f-4425-a584-760d19d009b5-serving-cert\") pod \"etcd-operator-b45778765-p7hq2\" (UID: \"150d06be-be0f-4425-a584-760d19d009b5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-p7hq2" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.612777 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/61f3ad0c-2191-4ea5-96ed-763ca80fbcba-bound-sa-token\") pod \"ingress-operator-5b745b69d9-94w25\" (UID: \"61f3ad0c-2191-4ea5-96ed-763ca80fbcba\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-94w25" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.612808 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzvcm\" (UniqueName: \"kubernetes.io/projected/61f3ad0c-2191-4ea5-96ed-763ca80fbcba-kube-api-access-nzvcm\") pod \"ingress-operator-5b745b69d9-94w25\" (UID: \"61f3ad0c-2191-4ea5-96ed-763ca80fbcba\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-94w25" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.612827 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cwk6j\" (UniqueName: \"kubernetes.io/projected/b4d4786f-591a-43fb-afe1-04c8daa257a7-kube-api-access-cwk6j\") pod \"router-default-5444994796-4qjqw\" (UID: \"b4d4786f-591a-43fb-afe1-04c8daa257a7\") " pod="openshift-ingress/router-default-5444994796-4qjqw" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.612851 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b4d4786f-591a-43fb-afe1-04c8daa257a7-service-ca-bundle\") pod \"router-default-5444994796-4qjqw\" (UID: \"b4d4786f-591a-43fb-afe1-04c8daa257a7\") " pod="openshift-ingress/router-default-5444994796-4qjqw" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.612880 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/150d06be-be0f-4425-a584-760d19d009b5-etcd-ca\") pod \"etcd-operator-b45778765-p7hq2\" (UID: \"150d06be-be0f-4425-a584-760d19d009b5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-p7hq2" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.612895 5003 
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.612895 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qkg9\" (UniqueName: \"kubernetes.io/projected/d7c10f1a-b1ca-4c58-882a-f5d834b31b5a-kube-api-access-6qkg9\") pod \"dns-operator-744455d44c-mvtxr\" (UID: \"d7c10f1a-b1ca-4c58-882a-f5d834b31b5a\") " pod="openshift-dns-operator/dns-operator-744455d44c-mvtxr"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.613444 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/4f00accf-ea52-4f16-9749-4af762d99a60-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-jwrlg\" (UID: \"4f00accf-ea52-4f16-9749-4af762d99a60\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jwrlg"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.613660 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/cee02b0a-ce4b-452e-aa00-48c7823c13d0-console-serving-cert\") pod \"console-f9d7485db-npbcn\" (UID: \"cee02b0a-ce4b-452e-aa00-48c7823c13d0\") " pod="openshift-console/console-f9d7485db-npbcn"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.615096 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cf6c1084-3ac6-4ac5-a15d-7e85f6cf75f5-serving-cert\") pod \"console-operator-58897d9998-drh5n\" (UID: \"cf6c1084-3ac6-4ac5-a15d-7e85f6cf75f5\") " pod="openshift-console-operator/console-operator-58897d9998-drh5n"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.615571 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/150d06be-be0f-4425-a584-760d19d009b5-etcd-client\") pod \"etcd-operator-b45778765-p7hq2\" (UID: \"150d06be-be0f-4425-a584-760d19d009b5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-p7hq2"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.615879 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/150d06be-be0f-4425-a584-760d19d009b5-config\") pod \"etcd-operator-b45778765-p7hq2\" (UID: \"150d06be-be0f-4425-a584-760d19d009b5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-p7hq2"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.615979 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/150d06be-be0f-4425-a584-760d19d009b5-etcd-service-ca\") pod \"etcd-operator-b45778765-p7hq2\" (UID: \"150d06be-be0f-4425-a584-760d19d009b5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-p7hq2"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.616932 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/61f3ad0c-2191-4ea5-96ed-763ca80fbcba-trusted-ca\") pod \"ingress-operator-5b745b69d9-94w25\" (UID: \"61f3ad0c-2191-4ea5-96ed-763ca80fbcba\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-94w25"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.617801 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/150d06be-be0f-4425-a584-760d19d009b5-etcd-ca\") pod \"etcd-operator-b45778765-p7hq2\" (UID: \"150d06be-be0f-4425-a584-760d19d009b5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-p7hq2"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.618422 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/150d06be-be0f-4425-a584-760d19d009b5-serving-cert\") pod \"etcd-operator-b45778765-p7hq2\" (UID: \"150d06be-be0f-4425-a584-760d19d009b5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-p7hq2"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.624821 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/61f3ad0c-2191-4ea5-96ed-763ca80fbcba-metrics-tls\") pod \"ingress-operator-5b745b69d9-94w25\" (UID: \"61f3ad0c-2191-4ea5-96ed-763ca80fbcba\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-94w25"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.648686 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.649267 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.651282 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.651498 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.652237 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.653446 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.653456 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7652f19-206e-401f-8424-e2af50465b27-serving-cert\") pod \"authentication-operator-69f744f599-hppvr\" (UID: \"e7652f19-206e-401f-8424-e2af50465b27\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hppvr"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.732058 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/41e7b2c5-c9fe-4000-830c-bf3351dd327f-machine-approver-tls\") pod \"machine-approver-56656f9798-5ww6p\" (UID: \"41e7b2c5-c9fe-4000-830c-bf3351dd327f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5ww6p"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.732462 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.735413 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/343f69ef-b8b8-459c-95d1-5234344b45e0-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-xzdsc\" (UID: \"343f69ef-b8b8-459c-95d1-5234344b45e0\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xzdsc"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.735597 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a48bd0ed-4703-46dd-9586-6141cfe7b15e-serving-cert\") pod \"openshift-config-operator-7777fb866f-bvkqz\" (UID: \"a48bd0ed-4703-46dd-9586-6141cfe7b15e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvkqz"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.735927 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/a48bd0ed-4703-46dd-9586-6141cfe7b15e-available-featuregates\") pod \"openshift-config-operator-7777fb866f-bvkqz\" (UID: \"a48bd0ed-4703-46dd-9586-6141cfe7b15e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvkqz"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.737185 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/58dc4be2-81aa-4567-b800-1b77019a7eca-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-vr8wv\" (UID: \"58dc4be2-81aa-4567-b800-1b77019a7eca\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vr8wv"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.739335 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.739429 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.739609 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/343f69ef-b8b8-459c-95d1-5234344b45e0-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-xzdsc\" (UID: \"343f69ef-b8b8-459c-95d1-5234344b45e0\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xzdsc"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.739758 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.740454 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09de6d60-7a17-4222-b6ea-457b9e58a937-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-92jlq\" (UID: \"09de6d60-7a17-4222-b6ea-457b9e58a937\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-92jlq"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.741189 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.741310 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.741332 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.741557 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.744589 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.745852 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.748743 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.749588 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b4d4786f-591a-43fb-afe1-04c8daa257a7-service-ca-bundle\") pod \"router-default-5444994796-4qjqw\" (UID: \"b4d4786f-591a-43fb-afe1-04c8daa257a7\") " pod="openshift-ingress/router-default-5444994796-4qjqw"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.751450 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0fc961e1-eee3-4fd5-ac99-56b85320740b-audit-policies\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.751615 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.752051 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b4d4786f-591a-43fb-afe1-04c8daa257a7-metrics-certs\") pod \"router-default-5444994796-4qjqw\" (UID: \"b4d4786f-591a-43fb-afe1-04c8daa257a7\") " pod="openshift-ingress/router-default-5444994796-4qjqw"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.752184 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/b4d4786f-591a-43fb-afe1-04c8daa257a7-stats-auth\") pod \"router-default-5444994796-4qjqw\" (UID: \"b4d4786f-591a-43fb-afe1-04c8daa257a7\") " pod="openshift-ingress/router-default-5444994796-4qjqw"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.753945 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.756362 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.762693 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d7c10f1a-b1ca-4c58-882a-f5d834b31b5a-metrics-tls\") pod \"dns-operator-744455d44c-mvtxr\" (UID: \"d7c10f1a-b1ca-4c58-882a-f5d834b31b5a\") " pod="openshift-dns-operator/dns-operator-744455d44c-mvtxr"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.791933 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/b4d4786f-591a-43fb-afe1-04c8daa257a7-default-certificate\") pod \"router-default-5444994796-4qjqw\" (UID: \"b4d4786f-591a-43fb-afe1-04c8daa257a7\") " pod="openshift-ingress/router-default-5444994796-4qjqw"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.793767 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.795913 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.864577 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.864950 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.866096 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.868277 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert"
Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.888333 5003 reflector.go:368] Caches populated for *v1.ConfigMap from
object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.909141 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.933212 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.950708 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.975409 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 06 15:34:28 crc kubenswrapper[5003]: I1206 15:34:28.988421 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.008911 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.036882 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-jx64p"] Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.046774 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.049390 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.069036 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.088468 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.110383 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-78q2b"] Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.111405 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.130350 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.148725 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.162243 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5"] Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.169694 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 06 15:34:29 crc kubenswrapper[5003]: W1206 15:34:29.171226 5003 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod58fe25e3_98d7_4725_841b_4bcd2e2f628f.slice/crio-4f0740dbac10446e4f8c992ebd90809dc354b55e5fc34f37af834185fb365b96 WatchSource:0}: Error finding container 4f0740dbac10446e4f8c992ebd90809dc354b55e5fc34f37af834185fb365b96: Status 404 returned error can't find the container with id 4f0740dbac10446e4f8c992ebd90809dc354b55e5fc34f37af834185fb365b96 Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.189260 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.209238 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.229430 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.248092 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.269166 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.289803 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.302904 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-dh4ts"] Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.308563 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 06 15:34:29 crc kubenswrapper[5003]: W1206 15:34:29.317764 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod07dcad69_d3a4_40e2_a4d2_e83eb74631d7.slice/crio-d45469c858d8f8fb8cbf4020f468689921c2c6ab0e3a40817e33bcc0182aed52 WatchSource:0}: Error finding container d45469c858d8f8fb8cbf4020f468689921c2c6ab0e3a40817e33bcc0182aed52: Status 404 returned error can't find the container with id d45469c858d8f8fb8cbf4020f468689921c2c6ab0e3a40817e33bcc0182aed52 Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.328015 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.349078 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.367944 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.387615 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.408918 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.412829 5003 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-g9t4q"] Dec 06 15:34:29 crc kubenswrapper[5003]: W1206 15:34:29.423291 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9990e306_5ae4_467e_9cc4_0225f9c05fc7.slice/crio-68c6897432878829293ce16b7a4c79f075d060bf066194281f2f0a3bec09891f WatchSource:0}: Error finding container 68c6897432878829293ce16b7a4c79f075d060bf066194281f2f0a3bec09891f: Status 404 returned error can't find the container with id 68c6897432878829293ce16b7a4c79f075d060bf066194281f2f0a3bec09891f Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.429337 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.435641 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-dh4ts" event={"ID":"07dcad69-d3a4-40e2-a4d2-e83eb74631d7","Type":"ContainerStarted","Data":"d45469c858d8f8fb8cbf4020f468689921c2c6ab0e3a40817e33bcc0182aed52"} Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.436705 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5" event={"ID":"58fe25e3-98d7-4725-841b-4bcd2e2f628f","Type":"ContainerStarted","Data":"4f0740dbac10446e4f8c992ebd90809dc354b55e5fc34f37af834185fb365b96"} Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.438243 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-78q2b" event={"ID":"86ec403c-0ca1-43ee-893e-917c87e5e174","Type":"ContainerStarted","Data":"b67120dc0ad9b400cd63bfd6ad74752fc397838509fea16a52428052ac145c9a"} Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.438276 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-78q2b" event={"ID":"86ec403c-0ca1-43ee-893e-917c87e5e174","Type":"ContainerStarted","Data":"9b175169a625061d1d06bf03bbd22737d89a54457888ed71b6c18ee4eb6107cd"} Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.438448 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-78q2b" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.440623 5003 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-78q2b container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.440754 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-78q2b" podUID="86ec403c-0ca1-43ee-893e-917c87e5e174" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.441140 5003 generic.go:334] "Generic (PLEG): container finished" podID="9280dab9-204d-4112-98be-c6809da2ad4e" containerID="c57c189256f1362e790146b25075dbeabae9b6788eed6c501692a8155895b517" exitCode=0 Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.441230 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-apiserver/apiserver-76f77b778f-jx64p" event={"ID":"9280dab9-204d-4112-98be-c6809da2ad4e","Type":"ContainerDied","Data":"c57c189256f1362e790146b25075dbeabae9b6788eed6c501692a8155895b517"} Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.441310 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-jx64p" event={"ID":"9280dab9-204d-4112-98be-c6809da2ad4e","Type":"ContainerStarted","Data":"8e8341f101211ef4b2deed58b7e1dcd79279dbe26b26a6f57aa9be8950646ec2"} Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.442332 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g9t4q" event={"ID":"9990e306-5ae4-467e-9cc4-0225f9c05fc7","Type":"ContainerStarted","Data":"68c6897432878829293ce16b7a4c79f075d060bf066194281f2f0a3bec09891f"} Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.448363 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.468806 5003 request.go:700] Waited for 1.016213879s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress-canary/configmaps?fieldSelector=metadata.name%3Dopenshift-service-ca.crt&limit=500&resourceVersion=0 Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.469953 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.489070 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.509095 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.528049 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.555973 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.568715 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.588475 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.611778 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.628401 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.649107 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.668189 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.689415 5003 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 06 
15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.710555 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.729959 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.749782 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.752049 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:34:29 crc kubenswrapper[5003]: E1206 15:34:29.752230 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:36:31.752201917 +0000 UTC m=+270.285556308 (durationBeforeRetry 2m2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.752358 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.752526 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.752581 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.752689 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 
15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.754053 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.759236 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.761083 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.761878 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.769424 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.788563 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.809854 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.828618 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.848507 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.907915 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wnwqk\" (UniqueName: \"kubernetes.io/projected/cee02b0a-ce4b-452e-aa00-48c7823c13d0-kube-api-access-wnwqk\") pod \"console-f9d7485db-npbcn\" (UID: \"cee02b0a-ce4b-452e-aa00-48c7823c13d0\") " pod="openshift-console/console-f9d7485db-npbcn" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.927414 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-26wpg\" (UniqueName: \"kubernetes.io/projected/cf6c1084-3ac6-4ac5-a15d-7e85f6cf75f5-kube-api-access-26wpg\") pod \"console-operator-58897d9998-drh5n\" (UID: \"cf6c1084-3ac6-4ac5-a15d-7e85f6cf75f5\") " pod="openshift-console-operator/console-operator-58897d9998-drh5n" Dec 06 15:34:29 crc 
kubenswrapper[5003]: I1206 15:34:29.927455 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.946179 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.946901 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/58dc4be2-81aa-4567-b800-1b77019a7eca-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-vr8wv\" (UID: \"58dc4be2-81aa-4567-b800-1b77019a7eca\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vr8wv" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.955120 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5cb1719e-962f-436c-bbc0-cd048de8dd14-ca-trust-extracted\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.955184 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5cb1719e-962f-436c-bbc0-cd048de8dd14-bound-sa-token\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.955255 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5cb1719e-962f-436c-bbc0-cd048de8dd14-trusted-ca\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.955298 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5cb1719e-962f-436c-bbc0-cd048de8dd14-installation-pull-secrets\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.955315 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htvtr\" (UniqueName: \"kubernetes.io/projected/5cb1719e-962f-436c-bbc0-cd048de8dd14-kube-api-access-htvtr\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.955346 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.955403 5003 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5cb1719e-962f-436c-bbc0-cd048de8dd14-registry-tls\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.955434 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5cb1719e-962f-436c-bbc0-cd048de8dd14-registry-certificates\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:29 crc kubenswrapper[5003]: E1206 15:34:29.956086 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:30.456073804 +0000 UTC m=+148.989428235 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.967815 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ctd7v\" (UniqueName: \"kubernetes.io/projected/58dc4be2-81aa-4567-b800-1b77019a7eca-kube-api-access-ctd7v\") pod \"cluster-image-registry-operator-dc59b4c8b-vr8wv\" (UID: \"58dc4be2-81aa-4567-b800-1b77019a7eca\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vr8wv" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.970986 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-npbcn" Dec 06 15:34:29 crc kubenswrapper[5003]: I1206 15:34:29.982372 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sccq2\" (UniqueName: \"kubernetes.io/projected/e7652f19-206e-401f-8424-e2af50465b27-kube-api-access-sccq2\") pod \"authentication-operator-69f744f599-hppvr\" (UID: \"e7652f19-206e-401f-8424-e2af50465b27\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hppvr" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.007704 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fmltp\" (UniqueName: \"kubernetes.io/projected/4f00accf-ea52-4f16-9749-4af762d99a60-kube-api-access-fmltp\") pod \"cluster-samples-operator-665b6dd947-jwrlg\" (UID: \"4f00accf-ea52-4f16-9749-4af762d99a60\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jwrlg" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.026006 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.037362 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5cqd\" (UniqueName: \"kubernetes.io/projected/a48bd0ed-4703-46dd-9586-6141cfe7b15e-kube-api-access-r5cqd\") pod \"openshift-config-operator-7777fb866f-bvkqz\" (UID: \"a48bd0ed-4703-46dd-9586-6141cfe7b15e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvkqz" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.042747 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fl57h\" (UniqueName: \"kubernetes.io/projected/09de6d60-7a17-4222-b6ea-457b9e58a937-kube-api-access-fl57h\") pod \"openshift-apiserver-operator-796bbdcf4f-92jlq\" (UID: \"09de6d60-7a17-4222-b6ea-457b9e58a937\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-92jlq" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.057046 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.057193 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c21f1e83-8b5a-4fee-b51e-3617d90b23f8-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-l8t2q\" (UID: \"c21f1e83-8b5a-4fee-b51e-3617d90b23f8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l8t2q" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.057228 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/4e8d2519-5570-4ac9-8968-3015f3658ef0-registration-dir\") pod \"csi-hostpathplugin-bk7j8\" (UID: \"4e8d2519-5570-4ac9-8968-3015f3658ef0\") " pod="hostpath-provisioner/csi-hostpathplugin-bk7j8" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.057244 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6995aa9-a5fd-4994-bbc0-a6ed0b630fa1-config\") pod \"kube-controller-manager-operator-78b949d7b-8fpj2\" (UID: \"e6995aa9-a5fd-4994-bbc0-a6ed0b630fa1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8fpj2" Dec 06 15:34:30 crc kubenswrapper[5003]: E1206 15:34:30.057277 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:30.557247237 +0000 UTC m=+149.090601618 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.057322 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dc7def17-767d-47b6-ad2f-4cf73e84ea5e-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-854zg\" (UID: \"dc7def17-767d-47b6-ad2f-4cf73e84ea5e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-854zg" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.057356 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/37f5fb4c-d8e0-421a-a921-a88e7a934b3a-config-volume\") pod \"collect-profiles-29417250-pmq24\" (UID: \"37f5fb4c-d8e0-421a-a921-a88e7a934b3a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29417250-pmq24" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.057382 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/4e8d2519-5570-4ac9-8968-3015f3658ef0-csi-data-dir\") pod \"csi-hostpathplugin-bk7j8\" (UID: \"4e8d2519-5570-4ac9-8968-3015f3658ef0\") " pod="hostpath-provisioner/csi-hostpathplugin-bk7j8" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.057424 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5cb1719e-962f-436c-bbc0-cd048de8dd14-installation-pull-secrets\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.057443 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45dwk\" (UniqueName: \"kubernetes.io/projected/de472d67-6d24-44c5-becf-1cd20d390264-kube-api-access-45dwk\") pod \"dns-default-wx2w9\" (UID: \"de472d67-6d24-44c5-becf-1cd20d390264\") " pod="openshift-dns/dns-default-wx2w9" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.057459 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/de472d67-6d24-44c5-becf-1cd20d390264-config-volume\") pod \"dns-default-wx2w9\" (UID: \"de472d67-6d24-44c5-becf-1cd20d390264\") " pod="openshift-dns/dns-default-wx2w9" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.057508 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vxpbs\" (UniqueName: \"kubernetes.io/projected/60490c57-c15a-4479-a735-257c6f60f1b0-kube-api-access-vxpbs\") pod \"machine-config-operator-74547568cd-7w4jb\" (UID: \"60490c57-c15a-4479-a735-257c6f60f1b0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7w4jb" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.057599 5003 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xm4g5\" (UniqueName: \"kubernetes.io/projected/c36f84f6-f4ce-40bd-b151-211f4face9e1-kube-api-access-xm4g5\") pod \"machine-config-server-ftnnx\" (UID: \"c36f84f6-f4ce-40bd-b151-211f4face9e1\") " pod="openshift-machine-config-operator/machine-config-server-ftnnx" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.057617 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/e109ae1a-d737-4150-9e67-728d9d8d32dc-srv-cert\") pod \"catalog-operator-68c6474976-rqfnv\" (UID: \"e109ae1a-d737-4150-9e67-728d9d8d32dc\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rqfnv" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.057642 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5m52\" (UniqueName: \"kubernetes.io/projected/8e450a8e-52f9-48fe-96c8-8f444a7437fe-kube-api-access-k5m52\") pod \"control-plane-machine-set-operator-78cbb6b69f-x6k88\" (UID: \"8e450a8e-52f9-48fe-96c8-8f444a7437fe\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-x6k88" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.057924 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zg49q\" (UniqueName: \"kubernetes.io/projected/e109ae1a-d737-4150-9e67-728d9d8d32dc-kube-api-access-zg49q\") pod \"catalog-operator-68c6474976-rqfnv\" (UID: \"e109ae1a-d737-4150-9e67-728d9d8d32dc\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rqfnv" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.057956 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/e109ae1a-d737-4150-9e67-728d9d8d32dc-profile-collector-cert\") pod \"catalog-operator-68c6474976-rqfnv\" (UID: \"e109ae1a-d737-4150-9e67-728d9d8d32dc\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rqfnv" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.057973 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/765bb4a4-7c41-414b-a9be-a54be49b76ff-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-lwc4r\" (UID: \"765bb4a4-7c41-414b-a9be-a54be49b76ff\") " pod="openshift-marketplace/marketplace-operator-79b997595-lwc4r" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.058029 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/765bb4a4-7c41-414b-a9be-a54be49b76ff-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-lwc4r\" (UID: \"765bb4a4-7c41-414b-a9be-a54be49b76ff\") " pod="openshift-marketplace/marketplace-operator-79b997595-lwc4r" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.058057 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/460fedd0-6ec4-4ef5-91ca-cc62ae21ebc4-config\") pod \"service-ca-operator-777779d784-rjl56\" (UID: \"460fedd0-6ec4-4ef5-91ca-cc62ae21ebc4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rjl56" Dec 06 15:34:30 crc 
kubenswrapper[5003]: I1206 15:34:30.058075 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e6995aa9-a5fd-4994-bbc0-a6ed0b630fa1-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-8fpj2\" (UID: \"e6995aa9-a5fd-4994-bbc0-a6ed0b630fa1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8fpj2" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.058699 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5cb1719e-962f-436c-bbc0-cd048de8dd14-registry-certificates\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.058723 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/460fedd0-6ec4-4ef5-91ca-cc62ae21ebc4-serving-cert\") pod \"service-ca-operator-777779d784-rjl56\" (UID: \"460fedd0-6ec4-4ef5-91ca-cc62ae21ebc4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rjl56" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.058745 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/0fca25a0-30bc-4906-8557-552531236ee4-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-dc5hk\" (UID: \"0fca25a0-30bc-4906-8557-552531236ee4\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dc5hk" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.059340 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dc7def17-767d-47b6-ad2f-4cf73e84ea5e-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-854zg\" (UID: \"dc7def17-767d-47b6-ad2f-4cf73e84ea5e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-854zg" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.059360 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/37f5fb4c-d8e0-421a-a921-a88e7a934b3a-secret-volume\") pod \"collect-profiles-29417250-pmq24\" (UID: \"37f5fb4c-d8e0-421a-a921-a88e7a934b3a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29417250-pmq24" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.059392 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/4e8d2519-5570-4ac9-8968-3015f3658ef0-plugins-dir\") pod \"csi-hostpathplugin-bk7j8\" (UID: \"4e8d2519-5570-4ac9-8968-3015f3658ef0\") " pod="hostpath-provisioner/csi-hostpathplugin-bk7j8" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.059409 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmczj\" (UniqueName: \"kubernetes.io/projected/37f5fb4c-d8e0-421a-a921-a88e7a934b3a-kube-api-access-nmczj\") pod \"collect-profiles-29417250-pmq24\" (UID: \"37f5fb4c-d8e0-421a-a921-a88e7a934b3a\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29417250-pmq24" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.059715 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5cb1719e-962f-436c-bbc0-cd048de8dd14-registry-certificates\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.060036 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5cb1719e-962f-436c-bbc0-cd048de8dd14-ca-trust-extracted\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.060115 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/1f3db07f-b32b-46e0-b697-a5140c5021cd-profile-collector-cert\") pod \"olm-operator-6b444d44fb-pt2wd\" (UID: \"1f3db07f-b32b-46e0-b697-a5140c5021cd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pt2wd" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.060159 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/af89c7bf-ebbc-464e-b11a-c7343acbf887-proxy-tls\") pod \"machine-config-controller-84d6567774-xvg94\" (UID: \"af89c7bf-ebbc-464e-b11a-c7343acbf887\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xvg94" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.060407 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5cb1719e-962f-436c-bbc0-cd048de8dd14-ca-trust-extracted\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.060763 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3a9d6180-dff8-46d9-92df-20ff4b1b466f-webhook-cert\") pod \"packageserver-d55dfcdfc-mhpsl\" (UID: \"3a9d6180-dff8-46d9-92df-20ff4b1b466f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mhpsl" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.060811 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/60490c57-c15a-4479-a735-257c6f60f1b0-images\") pod \"machine-config-operator-74547568cd-7w4jb\" (UID: \"60490c57-c15a-4479-a735-257c6f60f1b0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7w4jb" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.060840 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2tmn\" (UniqueName: \"kubernetes.io/projected/c21f1e83-8b5a-4fee-b51e-3617d90b23f8-kube-api-access-b2tmn\") pod \"kube-storage-version-migrator-operator-b67b599dd-l8t2q\" (UID: \"c21f1e83-8b5a-4fee-b51e-3617d90b23f8\") " 
pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l8t2q" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.060926 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5cb1719e-962f-436c-bbc0-cd048de8dd14-bound-sa-token\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.061005 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b9g6f\" (UniqueName: \"kubernetes.io/projected/4e8d2519-5570-4ac9-8968-3015f3658ef0-kube-api-access-b9g6f\") pod \"csi-hostpathplugin-bk7j8\" (UID: \"4e8d2519-5570-4ac9-8968-3015f3658ef0\") " pod="hostpath-provisioner/csi-hostpathplugin-bk7j8" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.061051 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/de472d67-6d24-44c5-becf-1cd20d390264-metrics-tls\") pod \"dns-default-wx2w9\" (UID: \"de472d67-6d24-44c5-becf-1cd20d390264\") " pod="openshift-dns/dns-default-wx2w9" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.061086 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/fb3e806c-c739-4628-bf9c-8745195fce4c-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-fp6zg\" (UID: \"fb3e806c-c739-4628-bf9c-8745195fce4c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-fp6zg" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.061134 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/af89c7bf-ebbc-464e-b11a-c7343acbf887-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-xvg94\" (UID: \"af89c7bf-ebbc-464e-b11a-c7343acbf887\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xvg94" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.061425 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5cb1719e-962f-436c-bbc0-cd048de8dd14-trusted-ca\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.061624 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/1f3db07f-b32b-46e0-b697-a5140c5021cd-srv-cert\") pod \"olm-operator-6b444d44fb-pt2wd\" (UID: \"1f3db07f-b32b-46e0-b697-a5140c5021cd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pt2wd" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.061664 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/8e450a8e-52f9-48fe-96c8-8f444a7437fe-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-x6k88\" (UID: \"8e450a8e-52f9-48fe-96c8-8f444a7437fe\") " 
pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-x6k88" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.061693 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/4e8d2519-5570-4ac9-8968-3015f3658ef0-socket-dir\") pod \"csi-hostpathplugin-bk7j8\" (UID: \"4e8d2519-5570-4ac9-8968-3015f3658ef0\") " pod="hostpath-provisioner/csi-hostpathplugin-bk7j8" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.061721 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6jsr\" (UniqueName: \"kubernetes.io/projected/d32d51fd-e8e1-4bc7-a1b4-be5c851a8651-kube-api-access-b6jsr\") pod \"migrator-59844c95c7-gqgvg\" (UID: \"d32d51fd-e8e1-4bc7-a1b4-be5c851a8651\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-gqgvg" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.061812 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e6995aa9-a5fd-4994-bbc0-a6ed0b630fa1-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-8fpj2\" (UID: \"e6995aa9-a5fd-4994-bbc0-a6ed0b630fa1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8fpj2" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.062581 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htvtr\" (UniqueName: \"kubernetes.io/projected/5cb1719e-962f-436c-bbc0-cd048de8dd14-kube-api-access-htvtr\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.062612 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3a9d6180-dff8-46d9-92df-20ff4b1b466f-apiservice-cert\") pod \"packageserver-d55dfcdfc-mhpsl\" (UID: \"3a9d6180-dff8-46d9-92df-20ff4b1b466f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mhpsl" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.062652 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/c36f84f6-f4ce-40bd-b151-211f4face9e1-certs\") pod \"machine-config-server-ftnnx\" (UID: \"c36f84f6-f4ce-40bd-b151-211f4face9e1\") " pod="openshift-machine-config-operator/machine-config-server-ftnnx" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.062676 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fkj29\" (UniqueName: \"kubernetes.io/projected/78a9da4a-a389-4286-a4cc-d2924052721a-kube-api-access-fkj29\") pod \"ingress-canary-t2c5q\" (UID: \"78a9da4a-a389-4286-a4cc-d2924052721a\") " pod="openshift-ingress-canary/ingress-canary-t2c5q" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.062959 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8dpk\" (UniqueName: \"kubernetes.io/projected/e3de71c3-340a-4492-8235-043eeb8bc509-kube-api-access-w8dpk\") pod \"service-ca-9c57cc56f-rrdrz\" (UID: \"e3de71c3-340a-4492-8235-043eeb8bc509\") " pod="openshift-service-ca/service-ca-9c57cc56f-rrdrz" Dec 06 15:34:30 crc 
kubenswrapper[5003]: I1206 15:34:30.063176 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:30 crc kubenswrapper[5003]: E1206 15:34:30.063459 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:30.563445423 +0000 UTC m=+149.096799804 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.063828 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q5jk9\" (UniqueName: \"kubernetes.io/projected/fb3e806c-c739-4628-bf9c-8745195fce4c-kube-api-access-q5jk9\") pod \"multus-admission-controller-857f4d67dd-fp6zg\" (UID: \"fb3e806c-c739-4628-bf9c-8745195fce4c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-fp6zg" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.063862 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c21f1e83-8b5a-4fee-b51e-3617d90b23f8-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-l8t2q\" (UID: \"c21f1e83-8b5a-4fee-b51e-3617d90b23f8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l8t2q" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.063912 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/4e8d2519-5570-4ac9-8968-3015f3658ef0-mountpoint-dir\") pod \"csi-hostpathplugin-bk7j8\" (UID: \"4e8d2519-5570-4ac9-8968-3015f3658ef0\") " pod="hostpath-provisioner/csi-hostpathplugin-bk7j8" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.063943 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/c36f84f6-f4ce-40bd-b151-211f4face9e1-node-bootstrap-token\") pod \"machine-config-server-ftnnx\" (UID: \"c36f84f6-f4ce-40bd-b151-211f4face9e1\") " pod="openshift-machine-config-operator/machine-config-server-ftnnx" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.063964 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/78a9da4a-a389-4286-a4cc-d2924052721a-cert\") pod \"ingress-canary-t2c5q\" (UID: \"78a9da4a-a389-4286-a4cc-d2924052721a\") " pod="openshift-ingress-canary/ingress-canary-t2c5q" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.064189 5003 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdhbk\" (UniqueName: \"kubernetes.io/projected/460fedd0-6ec4-4ef5-91ca-cc62ae21ebc4-kube-api-access-pdhbk\") pod \"service-ca-operator-777779d784-rjl56\" (UID: \"460fedd0-6ec4-4ef5-91ca-cc62ae21ebc4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rjl56" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.064220 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbz6m\" (UniqueName: \"kubernetes.io/projected/765bb4a4-7c41-414b-a9be-a54be49b76ff-kube-api-access-cbz6m\") pod \"marketplace-operator-79b997595-lwc4r\" (UID: \"765bb4a4-7c41-414b-a9be-a54be49b76ff\") " pod="openshift-marketplace/marketplace-operator-79b997595-lwc4r" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.064296 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5cb1719e-962f-436c-bbc0-cd048de8dd14-registry-tls\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.066767 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z5v42\" (UniqueName: \"kubernetes.io/projected/1f3db07f-b32b-46e0-b697-a5140c5021cd-kube-api-access-z5v42\") pod \"olm-operator-6b444d44fb-pt2wd\" (UID: \"1f3db07f-b32b-46e0-b697-a5140c5021cd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pt2wd" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.066912 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/e3de71c3-340a-4492-8235-043eeb8bc509-signing-cabundle\") pod \"service-ca-9c57cc56f-rrdrz\" (UID: \"e3de71c3-340a-4492-8235-043eeb8bc509\") " pod="openshift-service-ca/service-ca-9c57cc56f-rrdrz" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.067288 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-27jp7\" (UniqueName: \"kubernetes.io/projected/41e7b2c5-c9fe-4000-830c-bf3351dd327f-kube-api-access-27jp7\") pod \"machine-approver-56656f9798-5ww6p\" (UID: \"41e7b2c5-c9fe-4000-830c-bf3351dd327f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5ww6p" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.067310 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/3a9d6180-dff8-46d9-92df-20ff4b1b466f-tmpfs\") pod \"packageserver-d55dfcdfc-mhpsl\" (UID: \"3a9d6180-dff8-46d9-92df-20ff4b1b466f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mhpsl" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.067417 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/60490c57-c15a-4479-a735-257c6f60f1b0-proxy-tls\") pod \"machine-config-operator-74547568cd-7w4jb\" (UID: \"60490c57-c15a-4479-a735-257c6f60f1b0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7w4jb" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.067441 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/60490c57-c15a-4479-a735-257c6f60f1b0-auth-proxy-config\") pod \"machine-config-operator-74547568cd-7w4jb\" (UID: \"60490c57-c15a-4479-a735-257c6f60f1b0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7w4jb" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.067469 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvls4\" (UniqueName: \"kubernetes.io/projected/3a9d6180-dff8-46d9-92df-20ff4b1b466f-kube-api-access-qvls4\") pod \"packageserver-d55dfcdfc-mhpsl\" (UID: \"3a9d6180-dff8-46d9-92df-20ff4b1b466f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mhpsl" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.067511 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9c863b8e-b487-438d-a745-e3f41c2ef92c-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2qwm4\" (UID: \"9c863b8e-b487-438d-a745-e3f41c2ef92c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2qwm4" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.067534 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5cb1719e-962f-436c-bbc0-cd048de8dd14-installation-pull-secrets\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.067555 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9c863b8e-b487-438d-a745-e3f41c2ef92c-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2qwm4\" (UID: \"9c863b8e-b487-438d-a745-e3f41c2ef92c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2qwm4" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.067800 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9fq4h\" (UniqueName: \"kubernetes.io/projected/af89c7bf-ebbc-464e-b11a-c7343acbf887-kube-api-access-9fq4h\") pod \"machine-config-controller-84d6567774-xvg94\" (UID: \"af89c7bf-ebbc-464e-b11a-c7343acbf887\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xvg94" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.067825 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc7def17-767d-47b6-ad2f-4cf73e84ea5e-config\") pod \"kube-apiserver-operator-766d6c64bb-854zg\" (UID: \"dc7def17-767d-47b6-ad2f-4cf73e84ea5e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-854zg" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.067847 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/e3de71c3-340a-4492-8235-043eeb8bc509-signing-key\") pod \"service-ca-9c57cc56f-rrdrz\" (UID: \"e3de71c3-340a-4492-8235-043eeb8bc509\") " pod="openshift-service-ca/service-ca-9c57cc56f-rrdrz" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.067878 5003 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bgnhg\" (UniqueName: \"kubernetes.io/projected/0fca25a0-30bc-4906-8557-552531236ee4-kube-api-access-bgnhg\") pod \"package-server-manager-789f6589d5-dc5hk\" (UID: \"0fca25a0-30bc-4906-8557-552531236ee4\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dc5hk" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.067897 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c863b8e-b487-438d-a745-e3f41c2ef92c-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2qwm4\" (UID: \"9c863b8e-b487-438d-a745-e3f41c2ef92c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2qwm4" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.069133 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5cb1719e-962f-436c-bbc0-cd048de8dd14-registry-tls\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.073312 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5cb1719e-962f-436c-bbc0-cd048de8dd14-trusted-ca\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.093075 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rzl5n\" (UniqueName: \"kubernetes.io/projected/343f69ef-b8b8-459c-95d1-5234344b45e0-kube-api-access-rzl5n\") pod \"openshift-controller-manager-operator-756b6f6bc6-xzdsc\" (UID: \"343f69ef-b8b8-459c-95d1-5234344b45e0\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xzdsc" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.115908 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5ww6p" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.126262 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2g4n6\" (UniqueName: \"kubernetes.io/projected/0fc961e1-eee3-4fd5-ac99-56b85320740b-kube-api-access-2g4n6\") pod \"oauth-openshift-558db77b4-tsg4h\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.156569 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-drh5n" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.160115 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qkg9\" (UniqueName: \"kubernetes.io/projected/d7c10f1a-b1ca-4c58-882a-f5d834b31b5a-kube-api-access-6qkg9\") pod \"dns-operator-744455d44c-mvtxr\" (UID: \"d7c10f1a-b1ca-4c58-882a-f5d834b31b5a\") " pod="openshift-dns-operator/dns-operator-744455d44c-mvtxr" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169126 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169272 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c21f1e83-8b5a-4fee-b51e-3617d90b23f8-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-l8t2q\" (UID: \"c21f1e83-8b5a-4fee-b51e-3617d90b23f8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l8t2q" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169295 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/4e8d2519-5570-4ac9-8968-3015f3658ef0-registration-dir\") pod \"csi-hostpathplugin-bk7j8\" (UID: \"4e8d2519-5570-4ac9-8968-3015f3658ef0\") " pod="hostpath-provisioner/csi-hostpathplugin-bk7j8" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169310 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6995aa9-a5fd-4994-bbc0-a6ed0b630fa1-config\") pod \"kube-controller-manager-operator-78b949d7b-8fpj2\" (UID: \"e6995aa9-a5fd-4994-bbc0-a6ed0b630fa1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8fpj2" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169327 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dc7def17-767d-47b6-ad2f-4cf73e84ea5e-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-854zg\" (UID: \"dc7def17-767d-47b6-ad2f-4cf73e84ea5e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-854zg" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169343 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/37f5fb4c-d8e0-421a-a921-a88e7a934b3a-config-volume\") pod \"collect-profiles-29417250-pmq24\" (UID: \"37f5fb4c-d8e0-421a-a921-a88e7a934b3a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29417250-pmq24" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169357 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/4e8d2519-5570-4ac9-8968-3015f3658ef0-csi-data-dir\") pod \"csi-hostpathplugin-bk7j8\" (UID: \"4e8d2519-5570-4ac9-8968-3015f3658ef0\") " pod="hostpath-provisioner/csi-hostpathplugin-bk7j8" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169377 5003 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-45dwk\" (UniqueName: \"kubernetes.io/projected/de472d67-6d24-44c5-becf-1cd20d390264-kube-api-access-45dwk\") pod \"dns-default-wx2w9\" (UID: \"de472d67-6d24-44c5-becf-1cd20d390264\") " pod="openshift-dns/dns-default-wx2w9" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169393 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/de472d67-6d24-44c5-becf-1cd20d390264-config-volume\") pod \"dns-default-wx2w9\" (UID: \"de472d67-6d24-44c5-becf-1cd20d390264\") " pod="openshift-dns/dns-default-wx2w9" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169409 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vxpbs\" (UniqueName: \"kubernetes.io/projected/60490c57-c15a-4479-a735-257c6f60f1b0-kube-api-access-vxpbs\") pod \"machine-config-operator-74547568cd-7w4jb\" (UID: \"60490c57-c15a-4479-a735-257c6f60f1b0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7w4jb" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169428 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xm4g5\" (UniqueName: \"kubernetes.io/projected/c36f84f6-f4ce-40bd-b151-211f4face9e1-kube-api-access-xm4g5\") pod \"machine-config-server-ftnnx\" (UID: \"c36f84f6-f4ce-40bd-b151-211f4face9e1\") " pod="openshift-machine-config-operator/machine-config-server-ftnnx" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169442 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/e109ae1a-d737-4150-9e67-728d9d8d32dc-srv-cert\") pod \"catalog-operator-68c6474976-rqfnv\" (UID: \"e109ae1a-d737-4150-9e67-728d9d8d32dc\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rqfnv" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169458 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5m52\" (UniqueName: \"kubernetes.io/projected/8e450a8e-52f9-48fe-96c8-8f444a7437fe-kube-api-access-k5m52\") pod \"control-plane-machine-set-operator-78cbb6b69f-x6k88\" (UID: \"8e450a8e-52f9-48fe-96c8-8f444a7437fe\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-x6k88" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169472 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zg49q\" (UniqueName: \"kubernetes.io/projected/e109ae1a-d737-4150-9e67-728d9d8d32dc-kube-api-access-zg49q\") pod \"catalog-operator-68c6474976-rqfnv\" (UID: \"e109ae1a-d737-4150-9e67-728d9d8d32dc\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rqfnv" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169501 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/e109ae1a-d737-4150-9e67-728d9d8d32dc-profile-collector-cert\") pod \"catalog-operator-68c6474976-rqfnv\" (UID: \"e109ae1a-d737-4150-9e67-728d9d8d32dc\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rqfnv" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169517 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/765bb4a4-7c41-414b-a9be-a54be49b76ff-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-lwc4r\" (UID: \"765bb4a4-7c41-414b-a9be-a54be49b76ff\") " pod="openshift-marketplace/marketplace-operator-79b997595-lwc4r" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169531 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/765bb4a4-7c41-414b-a9be-a54be49b76ff-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-lwc4r\" (UID: \"765bb4a4-7c41-414b-a9be-a54be49b76ff\") " pod="openshift-marketplace/marketplace-operator-79b997595-lwc4r" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169546 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/460fedd0-6ec4-4ef5-91ca-cc62ae21ebc4-config\") pod \"service-ca-operator-777779d784-rjl56\" (UID: \"460fedd0-6ec4-4ef5-91ca-cc62ae21ebc4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rjl56" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169588 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e6995aa9-a5fd-4994-bbc0-a6ed0b630fa1-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-8fpj2\" (UID: \"e6995aa9-a5fd-4994-bbc0-a6ed0b630fa1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8fpj2" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169610 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/460fedd0-6ec4-4ef5-91ca-cc62ae21ebc4-serving-cert\") pod \"service-ca-operator-777779d784-rjl56\" (UID: \"460fedd0-6ec4-4ef5-91ca-cc62ae21ebc4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rjl56" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169625 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/0fca25a0-30bc-4906-8557-552531236ee4-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-dc5hk\" (UID: \"0fca25a0-30bc-4906-8557-552531236ee4\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dc5hk" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169643 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dc7def17-767d-47b6-ad2f-4cf73e84ea5e-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-854zg\" (UID: \"dc7def17-767d-47b6-ad2f-4cf73e84ea5e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-854zg" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169656 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/37f5fb4c-d8e0-421a-a921-a88e7a934b3a-secret-volume\") pod \"collect-profiles-29417250-pmq24\" (UID: \"37f5fb4c-d8e0-421a-a921-a88e7a934b3a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29417250-pmq24" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169670 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: 
\"kubernetes.io/host-path/4e8d2519-5570-4ac9-8968-3015f3658ef0-plugins-dir\") pod \"csi-hostpathplugin-bk7j8\" (UID: \"4e8d2519-5570-4ac9-8968-3015f3658ef0\") " pod="hostpath-provisioner/csi-hostpathplugin-bk7j8" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169685 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmczj\" (UniqueName: \"kubernetes.io/projected/37f5fb4c-d8e0-421a-a921-a88e7a934b3a-kube-api-access-nmczj\") pod \"collect-profiles-29417250-pmq24\" (UID: \"37f5fb4c-d8e0-421a-a921-a88e7a934b3a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29417250-pmq24" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169702 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/1f3db07f-b32b-46e0-b697-a5140c5021cd-profile-collector-cert\") pod \"olm-operator-6b444d44fb-pt2wd\" (UID: \"1f3db07f-b32b-46e0-b697-a5140c5021cd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pt2wd" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169716 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/af89c7bf-ebbc-464e-b11a-c7343acbf887-proxy-tls\") pod \"machine-config-controller-84d6567774-xvg94\" (UID: \"af89c7bf-ebbc-464e-b11a-c7343acbf887\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xvg94" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169740 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3a9d6180-dff8-46d9-92df-20ff4b1b466f-webhook-cert\") pod \"packageserver-d55dfcdfc-mhpsl\" (UID: \"3a9d6180-dff8-46d9-92df-20ff4b1b466f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mhpsl" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169753 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/60490c57-c15a-4479-a735-257c6f60f1b0-images\") pod \"machine-config-operator-74547568cd-7w4jb\" (UID: \"60490c57-c15a-4479-a735-257c6f60f1b0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7w4jb" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169768 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2tmn\" (UniqueName: \"kubernetes.io/projected/c21f1e83-8b5a-4fee-b51e-3617d90b23f8-kube-api-access-b2tmn\") pod \"kube-storage-version-migrator-operator-b67b599dd-l8t2q\" (UID: \"c21f1e83-8b5a-4fee-b51e-3617d90b23f8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l8t2q" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169793 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b9g6f\" (UniqueName: \"kubernetes.io/projected/4e8d2519-5570-4ac9-8968-3015f3658ef0-kube-api-access-b9g6f\") pod \"csi-hostpathplugin-bk7j8\" (UID: \"4e8d2519-5570-4ac9-8968-3015f3658ef0\") " pod="hostpath-provisioner/csi-hostpathplugin-bk7j8" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169807 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/fb3e806c-c739-4628-bf9c-8745195fce4c-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-fp6zg\" (UID: 
\"fb3e806c-c739-4628-bf9c-8745195fce4c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-fp6zg" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169820 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/de472d67-6d24-44c5-becf-1cd20d390264-metrics-tls\") pod \"dns-default-wx2w9\" (UID: \"de472d67-6d24-44c5-becf-1cd20d390264\") " pod="openshift-dns/dns-default-wx2w9" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169836 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/af89c7bf-ebbc-464e-b11a-c7343acbf887-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-xvg94\" (UID: \"af89c7bf-ebbc-464e-b11a-c7343acbf887\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xvg94" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169851 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/4e8d2519-5570-4ac9-8968-3015f3658ef0-socket-dir\") pod \"csi-hostpathplugin-bk7j8\" (UID: \"4e8d2519-5570-4ac9-8968-3015f3658ef0\") " pod="hostpath-provisioner/csi-hostpathplugin-bk7j8" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169867 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6jsr\" (UniqueName: \"kubernetes.io/projected/d32d51fd-e8e1-4bc7-a1b4-be5c851a8651-kube-api-access-b6jsr\") pod \"migrator-59844c95c7-gqgvg\" (UID: \"d32d51fd-e8e1-4bc7-a1b4-be5c851a8651\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-gqgvg" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169881 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/1f3db07f-b32b-46e0-b697-a5140c5021cd-srv-cert\") pod \"olm-operator-6b444d44fb-pt2wd\" (UID: \"1f3db07f-b32b-46e0-b697-a5140c5021cd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pt2wd" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169897 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/8e450a8e-52f9-48fe-96c8-8f444a7437fe-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-x6k88\" (UID: \"8e450a8e-52f9-48fe-96c8-8f444a7437fe\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-x6k88" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169915 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e6995aa9-a5fd-4994-bbc0-a6ed0b630fa1-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-8fpj2\" (UID: \"e6995aa9-a5fd-4994-bbc0-a6ed0b630fa1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8fpj2" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169940 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3a9d6180-dff8-46d9-92df-20ff4b1b466f-apiservice-cert\") pod \"packageserver-d55dfcdfc-mhpsl\" (UID: \"3a9d6180-dff8-46d9-92df-20ff4b1b466f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mhpsl" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 
15:34:30.169954 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/c36f84f6-f4ce-40bd-b151-211f4face9e1-certs\") pod \"machine-config-server-ftnnx\" (UID: \"c36f84f6-f4ce-40bd-b151-211f4face9e1\") " pod="openshift-machine-config-operator/machine-config-server-ftnnx" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169968 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fkj29\" (UniqueName: \"kubernetes.io/projected/78a9da4a-a389-4286-a4cc-d2924052721a-kube-api-access-fkj29\") pod \"ingress-canary-t2c5q\" (UID: \"78a9da4a-a389-4286-a4cc-d2924052721a\") " pod="openshift-ingress-canary/ingress-canary-t2c5q" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.169983 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8dpk\" (UniqueName: \"kubernetes.io/projected/e3de71c3-340a-4492-8235-043eeb8bc509-kube-api-access-w8dpk\") pod \"service-ca-9c57cc56f-rrdrz\" (UID: \"e3de71c3-340a-4492-8235-043eeb8bc509\") " pod="openshift-service-ca/service-ca-9c57cc56f-rrdrz" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.170007 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q5jk9\" (UniqueName: \"kubernetes.io/projected/fb3e806c-c739-4628-bf9c-8745195fce4c-kube-api-access-q5jk9\") pod \"multus-admission-controller-857f4d67dd-fp6zg\" (UID: \"fb3e806c-c739-4628-bf9c-8745195fce4c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-fp6zg" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.170023 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c21f1e83-8b5a-4fee-b51e-3617d90b23f8-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-l8t2q\" (UID: \"c21f1e83-8b5a-4fee-b51e-3617d90b23f8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l8t2q" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.170038 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/4e8d2519-5570-4ac9-8968-3015f3658ef0-mountpoint-dir\") pod \"csi-hostpathplugin-bk7j8\" (UID: \"4e8d2519-5570-4ac9-8968-3015f3658ef0\") " pod="hostpath-provisioner/csi-hostpathplugin-bk7j8" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.170052 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/c36f84f6-f4ce-40bd-b151-211f4face9e1-node-bootstrap-token\") pod \"machine-config-server-ftnnx\" (UID: \"c36f84f6-f4ce-40bd-b151-211f4face9e1\") " pod="openshift-machine-config-operator/machine-config-server-ftnnx" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.170066 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/78a9da4a-a389-4286-a4cc-d2924052721a-cert\") pod \"ingress-canary-t2c5q\" (UID: \"78a9da4a-a389-4286-a4cc-d2924052721a\") " pod="openshift-ingress-canary/ingress-canary-t2c5q" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.170081 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdhbk\" (UniqueName: \"kubernetes.io/projected/460fedd0-6ec4-4ef5-91ca-cc62ae21ebc4-kube-api-access-pdhbk\") pod \"service-ca-operator-777779d784-rjl56\" 
(UID: \"460fedd0-6ec4-4ef5-91ca-cc62ae21ebc4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rjl56" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.170095 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbz6m\" (UniqueName: \"kubernetes.io/projected/765bb4a4-7c41-414b-a9be-a54be49b76ff-kube-api-access-cbz6m\") pod \"marketplace-operator-79b997595-lwc4r\" (UID: \"765bb4a4-7c41-414b-a9be-a54be49b76ff\") " pod="openshift-marketplace/marketplace-operator-79b997595-lwc4r" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.170121 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z5v42\" (UniqueName: \"kubernetes.io/projected/1f3db07f-b32b-46e0-b697-a5140c5021cd-kube-api-access-z5v42\") pod \"olm-operator-6b444d44fb-pt2wd\" (UID: \"1f3db07f-b32b-46e0-b697-a5140c5021cd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pt2wd" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.170138 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/e3de71c3-340a-4492-8235-043eeb8bc509-signing-cabundle\") pod \"service-ca-9c57cc56f-rrdrz\" (UID: \"e3de71c3-340a-4492-8235-043eeb8bc509\") " pod="openshift-service-ca/service-ca-9c57cc56f-rrdrz" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.170165 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/3a9d6180-dff8-46d9-92df-20ff4b1b466f-tmpfs\") pod \"packageserver-d55dfcdfc-mhpsl\" (UID: \"3a9d6180-dff8-46d9-92df-20ff4b1b466f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mhpsl" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.170182 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/60490c57-c15a-4479-a735-257c6f60f1b0-proxy-tls\") pod \"machine-config-operator-74547568cd-7w4jb\" (UID: \"60490c57-c15a-4479-a735-257c6f60f1b0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7w4jb" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.170197 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/60490c57-c15a-4479-a735-257c6f60f1b0-auth-proxy-config\") pod \"machine-config-operator-74547568cd-7w4jb\" (UID: \"60490c57-c15a-4479-a735-257c6f60f1b0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7w4jb" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.170214 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvls4\" (UniqueName: \"kubernetes.io/projected/3a9d6180-dff8-46d9-92df-20ff4b1b466f-kube-api-access-qvls4\") pod \"packageserver-d55dfcdfc-mhpsl\" (UID: \"3a9d6180-dff8-46d9-92df-20ff4b1b466f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mhpsl" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.170230 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9c863b8e-b487-438d-a745-e3f41c2ef92c-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2qwm4\" (UID: \"9c863b8e-b487-438d-a745-e3f41c2ef92c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2qwm4" Dec 06 15:34:30 crc 
kubenswrapper[5003]: I1206 15:34:30.170244 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9c863b8e-b487-438d-a745-e3f41c2ef92c-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2qwm4\" (UID: \"9c863b8e-b487-438d-a745-e3f41c2ef92c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2qwm4" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.170262 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc7def17-767d-47b6-ad2f-4cf73e84ea5e-config\") pod \"kube-apiserver-operator-766d6c64bb-854zg\" (UID: \"dc7def17-767d-47b6-ad2f-4cf73e84ea5e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-854zg" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.170278 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9fq4h\" (UniqueName: \"kubernetes.io/projected/af89c7bf-ebbc-464e-b11a-c7343acbf887-kube-api-access-9fq4h\") pod \"machine-config-controller-84d6567774-xvg94\" (UID: \"af89c7bf-ebbc-464e-b11a-c7343acbf887\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xvg94" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.170293 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/e3de71c3-340a-4492-8235-043eeb8bc509-signing-key\") pod \"service-ca-9c57cc56f-rrdrz\" (UID: \"e3de71c3-340a-4492-8235-043eeb8bc509\") " pod="openshift-service-ca/service-ca-9c57cc56f-rrdrz" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.170310 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bgnhg\" (UniqueName: \"kubernetes.io/projected/0fca25a0-30bc-4906-8557-552531236ee4-kube-api-access-bgnhg\") pod \"package-server-manager-789f6589d5-dc5hk\" (UID: \"0fca25a0-30bc-4906-8557-552531236ee4\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dc5hk" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.170326 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c863b8e-b487-438d-a745-e3f41c2ef92c-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2qwm4\" (UID: \"9c863b8e-b487-438d-a745-e3f41c2ef92c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2qwm4" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.171496 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c863b8e-b487-438d-a745-e3f41c2ef92c-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2qwm4\" (UID: \"9c863b8e-b487-438d-a745-e3f41c2ef92c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2qwm4" Dec 06 15:34:30 crc kubenswrapper[5003]: E1206 15:34:30.171565 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:30.671552911 +0000 UTC m=+149.204907292 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.174973 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c21f1e83-8b5a-4fee-b51e-3617d90b23f8-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-l8t2q\" (UID: \"c21f1e83-8b5a-4fee-b51e-3617d90b23f8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l8t2q" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.175125 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/4e8d2519-5570-4ac9-8968-3015f3658ef0-registration-dir\") pod \"csi-hostpathplugin-bk7j8\" (UID: \"4e8d2519-5570-4ac9-8968-3015f3658ef0\") " pod="hostpath-provisioner/csi-hostpathplugin-bk7j8" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.176220 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6995aa9-a5fd-4994-bbc0-a6ed0b630fa1-config\") pod \"kube-controller-manager-operator-78b949d7b-8fpj2\" (UID: \"e6995aa9-a5fd-4994-bbc0-a6ed0b630fa1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8fpj2" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.177499 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-92jlq" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.178801 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/af89c7bf-ebbc-464e-b11a-c7343acbf887-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-xvg94\" (UID: \"af89c7bf-ebbc-464e-b11a-c7343acbf887\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xvg94" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.179585 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/37f5fb4c-d8e0-421a-a921-a88e7a934b3a-config-volume\") pod \"collect-profiles-29417250-pmq24\" (UID: \"37f5fb4c-d8e0-421a-a921-a88e7a934b3a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29417250-pmq24" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.179675 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/4e8d2519-5570-4ac9-8968-3015f3658ef0-csi-data-dir\") pod \"csi-hostpathplugin-bk7j8\" (UID: \"4e8d2519-5570-4ac9-8968-3015f3658ef0\") " pod="hostpath-provisioner/csi-hostpathplugin-bk7j8" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.180396 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/de472d67-6d24-44c5-becf-1cd20d390264-config-volume\") pod \"dns-default-wx2w9\" (UID: \"de472d67-6d24-44c5-becf-1cd20d390264\") " pod="openshift-dns/dns-default-wx2w9" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.182807 5003 util.go:30] "No sandbox for pod can be found. 
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.183731 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jswnm\" (UniqueName: \"kubernetes.io/projected/495babf4-9201-4523-8a21-44e001d4f4c1-kube-api-access-jswnm\") pod \"downloads-7954f5f757-xs4nd\" (UID: \"495babf4-9201-4523-8a21-44e001d4f4c1\") " pod="openshift-console/downloads-7954f5f757-xs4nd"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.184359 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/60490c57-c15a-4479-a735-257c6f60f1b0-auth-proxy-config\") pod \"machine-config-operator-74547568cd-7w4jb\" (UID: \"60490c57-c15a-4479-a735-257c6f60f1b0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7w4jb"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.185178 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/e3de71c3-340a-4492-8235-043eeb8bc509-signing-cabundle\") pod \"service-ca-9c57cc56f-rrdrz\" (UID: \"e3de71c3-340a-4492-8235-043eeb8bc509\") " pod="openshift-service-ca/service-ca-9c57cc56f-rrdrz"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.185482 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/3a9d6180-dff8-46d9-92df-20ff4b1b466f-tmpfs\") pod \"packageserver-d55dfcdfc-mhpsl\" (UID: \"3a9d6180-dff8-46d9-92df-20ff4b1b466f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mhpsl"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.185757 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/c36f84f6-f4ce-40bd-b151-211f4face9e1-certs\") pod \"machine-config-server-ftnnx\" (UID: \"c36f84f6-f4ce-40bd-b151-211f4face9e1\") " pod="openshift-machine-config-operator/machine-config-server-ftnnx"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.185874 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/4e8d2519-5570-4ac9-8968-3015f3658ef0-socket-dir\") pod \"csi-hostpathplugin-bk7j8\" (UID: \"4e8d2519-5570-4ac9-8968-3015f3658ef0\") " pod="hostpath-provisioner/csi-hostpathplugin-bk7j8"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.187208 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c21f1e83-8b5a-4fee-b51e-3617d90b23f8-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-l8t2q\" (UID: \"c21f1e83-8b5a-4fee-b51e-3617d90b23f8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l8t2q"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.187271 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/4e8d2519-5570-4ac9-8968-3015f3658ef0-mountpoint-dir\") pod \"csi-hostpathplugin-bk7j8\" (UID: \"4e8d2519-5570-4ac9-8968-3015f3658ef0\") " pod="hostpath-provisioner/csi-hostpathplugin-bk7j8"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.188674 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/1f3db07f-b32b-46e0-b697-a5140c5021cd-srv-cert\") pod \"olm-operator-6b444d44fb-pt2wd\" (UID: \"1f3db07f-b32b-46e0-b697-a5140c5021cd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pt2wd"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.191415 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/78a9da4a-a389-4286-a4cc-d2924052721a-cert\") pod \"ingress-canary-t2c5q\" (UID: \"78a9da4a-a389-4286-a4cc-d2924052721a\") " pod="openshift-ingress-canary/ingress-canary-t2c5q"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.192591 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/e109ae1a-d737-4150-9e67-728d9d8d32dc-profile-collector-cert\") pod \"catalog-operator-68c6474976-rqfnv\" (UID: \"e109ae1a-d737-4150-9e67-728d9d8d32dc\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rqfnv"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.192868 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc7def17-767d-47b6-ad2f-4cf73e84ea5e-config\") pod \"kube-apiserver-operator-766d6c64bb-854zg\" (UID: \"dc7def17-767d-47b6-ad2f-4cf73e84ea5e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-854zg"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.199153 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/60490c57-c15a-4479-a735-257c6f60f1b0-images\") pod \"machine-config-operator-74547568cd-7w4jb\" (UID: \"60490c57-c15a-4479-a735-257c6f60f1b0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7w4jb"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.200126 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xzdsc"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.200877 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/fb3e806c-c739-4628-bf9c-8745195fce4c-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-fp6zg\" (UID: \"fb3e806c-c739-4628-bf9c-8745195fce4c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-fp6zg"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.201053 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/4e8d2519-5570-4ac9-8968-3015f3658ef0-plugins-dir\") pod \"csi-hostpathplugin-bk7j8\" (UID: \"4e8d2519-5570-4ac9-8968-3015f3658ef0\") " pod="hostpath-provisioner/csi-hostpathplugin-bk7j8"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.201245 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/765bb4a4-7c41-414b-a9be-a54be49b76ff-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-lwc4r\" (UID: \"765bb4a4-7c41-414b-a9be-a54be49b76ff\") " pod="openshift-marketplace/marketplace-operator-79b997595-lwc4r"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.201367 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/460fedd0-6ec4-4ef5-91ca-cc62ae21ebc4-config\") pod \"service-ca-operator-777779d784-rjl56\" (UID: \"460fedd0-6ec4-4ef5-91ca-cc62ae21ebc4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rjl56"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.203216 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/c36f84f6-f4ce-40bd-b151-211f4face9e1-node-bootstrap-token\") pod \"machine-config-server-ftnnx\" (UID: \"c36f84f6-f4ce-40bd-b151-211f4face9e1\") " pod="openshift-machine-config-operator/machine-config-server-ftnnx"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.208847 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvkqz"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.216019 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/61f3ad0c-2191-4ea5-96ed-763ca80fbcba-bound-sa-token\") pod \"ingress-operator-5b745b69d9-94w25\" (UID: \"61f3ad0c-2191-4ea5-96ed-763ca80fbcba\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-94w25"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.219084 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/460fedd0-6ec4-4ef5-91ca-cc62ae21ebc4-serving-cert\") pod \"service-ca-operator-777779d784-rjl56\" (UID: \"460fedd0-6ec4-4ef5-91ca-cc62ae21ebc4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rjl56"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.222124 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/37f5fb4c-d8e0-421a-a921-a88e7a934b3a-secret-volume\") pod \"collect-profiles-29417250-pmq24\" (UID: \"37f5fb4c-d8e0-421a-a921-a88e7a934b3a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29417250-pmq24"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.222138 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/e109ae1a-d737-4150-9e67-728d9d8d32dc-srv-cert\") pod \"catalog-operator-68c6474976-rqfnv\" (UID: \"e109ae1a-d737-4150-9e67-728d9d8d32dc\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rqfnv"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.222724 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/8e450a8e-52f9-48fe-96c8-8f444a7437fe-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-x6k88\" (UID: \"8e450a8e-52f9-48fe-96c8-8f444a7437fe\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-x6k88"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.233745 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jwrlg"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.235331 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dc7def17-767d-47b6-ad2f-4cf73e84ea5e-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-854zg\" (UID: \"dc7def17-767d-47b6-ad2f-4cf73e84ea5e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-854zg"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.246337 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-hppvr"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.248943 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/60490c57-c15a-4479-a735-257c6f60f1b0-proxy-tls\") pod \"machine-config-operator-74547568cd-7w4jb\" (UID: \"60490c57-c15a-4479-a735-257c6f60f1b0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7w4jb"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.249168 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3a9d6180-dff8-46d9-92df-20ff4b1b466f-apiservice-cert\") pod \"packageserver-d55dfcdfc-mhpsl\" (UID: \"3a9d6180-dff8-46d9-92df-20ff4b1b466f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mhpsl"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.249259 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e6995aa9-a5fd-4994-bbc0-a6ed0b630fa1-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-8fpj2\" (UID: \"e6995aa9-a5fd-4994-bbc0-a6ed0b630fa1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8fpj2"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.249323 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vr8wv"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.249960 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9c863b8e-b487-438d-a745-e3f41c2ef92c-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2qwm4\" (UID: \"9c863b8e-b487-438d-a745-e3f41c2ef92c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2qwm4"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.250410 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/af89c7bf-ebbc-464e-b11a-c7343acbf887-proxy-tls\") pod \"machine-config-controller-84d6567774-xvg94\" (UID: \"af89c7bf-ebbc-464e-b11a-c7343acbf887\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xvg94"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.251356 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/e3de71c3-340a-4492-8235-043eeb8bc509-signing-key\") pod \"service-ca-9c57cc56f-rrdrz\" (UID: \"e3de71c3-340a-4492-8235-043eeb8bc509\") " pod="openshift-service-ca/service-ca-9c57cc56f-rrdrz"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.251367 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/765bb4a4-7c41-414b-a9be-a54be49b76ff-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-lwc4r\" (UID: \"765bb4a4-7c41-414b-a9be-a54be49b76ff\") " pod="openshift-marketplace/marketplace-operator-79b997595-lwc4r"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.251864 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86b74\" (UniqueName: \"kubernetes.io/projected/150d06be-be0f-4425-a584-760d19d009b5-kube-api-access-86b74\") pod \"etcd-operator-b45778765-p7hq2\" (UID: \"150d06be-be0f-4425-a584-760d19d009b5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-p7hq2"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.251912 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/1f3db07f-b32b-46e0-b697-a5140c5021cd-profile-collector-cert\") pod \"olm-operator-6b444d44fb-pt2wd\" (UID: \"1f3db07f-b32b-46e0-b697-a5140c5021cd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pt2wd"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.263731 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/de472d67-6d24-44c5-becf-1cd20d390264-metrics-tls\") pod \"dns-default-wx2w9\" (UID: \"de472d67-6d24-44c5-becf-1cd20d390264\") " pod="openshift-dns/dns-default-wx2w9"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.264109 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzvcm\" (UniqueName: \"kubernetes.io/projected/61f3ad0c-2191-4ea5-96ed-763ca80fbcba-kube-api-access-nzvcm\") pod \"ingress-operator-5b745b69d9-94w25\" (UID: \"61f3ad0c-2191-4ea5-96ed-763ca80fbcba\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-94w25"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.268803 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/0fca25a0-30bc-4906-8557-552531236ee4-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-dc5hk\" (UID: \"0fca25a0-30bc-4906-8557-552531236ee4\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dc5hk"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.271733 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh"
Dec 06 15:34:30 crc kubenswrapper[5003]: E1206 15:34:30.272093 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:30.772073927 +0000 UTC m=+149.305428308 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.278002 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-xs4nd"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.286217 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5cb1719e-962f-436c-bbc0-cd048de8dd14-bound-sa-token\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.298900 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-p7hq2"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.305853 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-94w25"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.308640 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3a9d6180-dff8-46d9-92df-20ff4b1b466f-webhook-cert\") pod \"packageserver-d55dfcdfc-mhpsl\" (UID: \"3a9d6180-dff8-46d9-92df-20ff4b1b466f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mhpsl"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.316721 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-mvtxr"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.319292 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cwk6j\" (UniqueName: \"kubernetes.io/projected/b4d4786f-591a-43fb-afe1-04c8daa257a7-kube-api-access-cwk6j\") pod \"router-default-5444994796-4qjqw\" (UID: \"b4d4786f-591a-43fb-afe1-04c8daa257a7\") " pod="openshift-ingress/router-default-5444994796-4qjqw"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.328643 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htvtr\" (UniqueName: \"kubernetes.io/projected/5cb1719e-962f-436c-bbc0-cd048de8dd14-kube-api-access-htvtr\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.348606 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45dwk\" (UniqueName: \"kubernetes.io/projected/de472d67-6d24-44c5-becf-1cd20d390264-kube-api-access-45dwk\") pod \"dns-default-wx2w9\" (UID: \"de472d67-6d24-44c5-becf-1cd20d390264\") " pod="openshift-dns/dns-default-wx2w9"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.349377 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vxpbs\" (UniqueName: \"kubernetes.io/projected/60490c57-c15a-4479-a735-257c6f60f1b0-kube-api-access-vxpbs\") pod \"machine-config-operator-74547568cd-7w4jb\" (UID: \"60490c57-c15a-4479-a735-257c6f60f1b0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7w4jb"
Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.365801 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xm4g5\" (UniqueName: \"kubernetes.io/projected/c36f84f6-f4ce-40bd-b151-211f4face9e1-kube-api-access-xm4g5\") pod \"machine-config-server-ftnnx\" (UID: \"c36f84f6-f4ce-40bd-b151-211f4face9e1\") " pod="openshift-machine-config-operator/machine-config-server-ftnnx"
Dec 06
15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.376344 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:34:30 crc kubenswrapper[5003]: E1206 15:34:30.376857 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:30.876835256 +0000 UTC m=+149.410189637 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.381949 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdhbk\" (UniqueName: \"kubernetes.io/projected/460fedd0-6ec4-4ef5-91ca-cc62ae21ebc4-kube-api-access-pdhbk\") pod \"service-ca-operator-777779d784-rjl56\" (UID: \"460fedd0-6ec4-4ef5-91ca-cc62ae21ebc4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rjl56" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.437175 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-ftnnx" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.449572 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbz6m\" (UniqueName: \"kubernetes.io/projected/765bb4a4-7c41-414b-a9be-a54be49b76ff-kube-api-access-cbz6m\") pod \"marketplace-operator-79b997595-lwc4r\" (UID: \"765bb4a4-7c41-414b-a9be-a54be49b76ff\") " pod="openshift-marketplace/marketplace-operator-79b997595-lwc4r" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.452804 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-rjl56" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.453148 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q5jk9\" (UniqueName: \"kubernetes.io/projected/fb3e806c-c739-4628-bf9c-8745195fce4c-kube-api-access-q5jk9\") pod \"multus-admission-controller-857f4d67dd-fp6zg\" (UID: \"fb3e806c-c739-4628-bf9c-8745195fce4c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-fp6zg" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.453746 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z5v42\" (UniqueName: \"kubernetes.io/projected/1f3db07f-b32b-46e0-b697-a5140c5021cd-kube-api-access-z5v42\") pod \"olm-operator-6b444d44fb-pt2wd\" (UID: \"1f3db07f-b32b-46e0-b697-a5140c5021cd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pt2wd" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.466446 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fkj29\" (UniqueName: \"kubernetes.io/projected/78a9da4a-a389-4286-a4cc-d2924052721a-kube-api-access-fkj29\") pod \"ingress-canary-t2c5q\" (UID: \"78a9da4a-a389-4286-a4cc-d2924052721a\") " pod="openshift-ingress-canary/ingress-canary-t2c5q" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.478255 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:30 crc kubenswrapper[5003]: E1206 15:34:30.478644 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:30.978602105 +0000 UTC m=+149.511956486 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.517878 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8dpk\" (UniqueName: \"kubernetes.io/projected/e3de71c3-340a-4492-8235-043eeb8bc509-kube-api-access-w8dpk\") pod \"service-ca-9c57cc56f-rrdrz\" (UID: \"e3de71c3-340a-4492-8235-043eeb8bc509\") " pod="openshift-service-ca/service-ca-9c57cc56f-rrdrz" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.518357 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-t2c5q" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.521072 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6jsr\" (UniqueName: \"kubernetes.io/projected/d32d51fd-e8e1-4bc7-a1b4-be5c851a8651-kube-api-access-b6jsr\") pod \"migrator-59844c95c7-gqgvg\" (UID: \"d32d51fd-e8e1-4bc7-a1b4-be5c851a8651\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-gqgvg" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.523652 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-rrdrz" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.546922 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-4qjqw" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.550944 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-wx2w9" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.550984 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5m52\" (UniqueName: \"kubernetes.io/projected/8e450a8e-52f9-48fe-96c8-8f444a7437fe-kube-api-access-k5m52\") pod \"control-plane-machine-set-operator-78cbb6b69f-x6k88\" (UID: \"8e450a8e-52f9-48fe-96c8-8f444a7437fe\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-x6k88" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.552107 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zg49q\" (UniqueName: \"kubernetes.io/projected/e109ae1a-d737-4150-9e67-728d9d8d32dc-kube-api-access-zg49q\") pod \"catalog-operator-68c6474976-rqfnv\" (UID: \"e109ae1a-d737-4150-9e67-728d9d8d32dc\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rqfnv" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.552364 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rqfnv" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.552802 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pt2wd" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.560056 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvls4\" (UniqueName: \"kubernetes.io/projected/3a9d6180-dff8-46d9-92df-20ff4b1b466f-kube-api-access-qvls4\") pod \"packageserver-d55dfcdfc-mhpsl\" (UID: \"3a9d6180-dff8-46d9-92df-20ff4b1b466f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mhpsl" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.567771 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g9t4q" event={"ID":"9990e306-5ae4-467e-9cc4-0225f9c05fc7","Type":"ContainerStarted","Data":"db91c2a9628a0aad0aa0e7a1210c5736a4dabe6ac52cb6eb2ee1b064eabb752f"} Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.572404 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g9t4q" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.581844 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.581990 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9fq4h\" (UniqueName: \"kubernetes.io/projected/af89c7bf-ebbc-464e-b11a-c7343acbf887-kube-api-access-9fq4h\") pod \"machine-config-controller-84d6567774-xvg94\" (UID: \"af89c7bf-ebbc-464e-b11a-c7343acbf887\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xvg94" Dec 06 15:34:30 crc kubenswrapper[5003]: E1206 15:34:30.582158 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:31.082139931 +0000 UTC m=+149.615494312 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.582244 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7w4jb" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.594088 5003 generic.go:334] "Generic (PLEG): container finished" podID="58fe25e3-98d7-4725-841b-4bcd2e2f628f" containerID="7d385d61bee9e55b804c6056db17f0344e3729e74015ba9a0267aa83c6833349" exitCode=0 Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.594178 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5" event={"ID":"58fe25e3-98d7-4725-841b-4bcd2e2f628f","Type":"ContainerDied","Data":"7d385d61bee9e55b804c6056db17f0344e3729e74015ba9a0267aa83c6833349"} Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.595020 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mhpsl" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.596379 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9c863b8e-b487-438d-a745-e3f41c2ef92c-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2qwm4\" (UID: \"9c863b8e-b487-438d-a745-e3f41c2ef92c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2qwm4" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.601049 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-jx64p" event={"ID":"9280dab9-204d-4112-98be-c6809da2ad4e","Type":"ContainerStarted","Data":"9a428ff075f6543264ca77479719bf53da0d29f06b393e0782ffee216d49b2a9"} Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.601079 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-jx64p" event={"ID":"9280dab9-204d-4112-98be-c6809da2ad4e","Type":"ContainerStarted","Data":"1cba9224bd24a41251fd1d91fb0cdb515061815abc89857ea1f0d4c906fe140b"} Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.611826 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-gqgvg" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.627428 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2tmn\" (UniqueName: \"kubernetes.io/projected/c21f1e83-8b5a-4fee-b51e-3617d90b23f8-kube-api-access-b2tmn\") pod \"kube-storage-version-migrator-operator-b67b599dd-l8t2q\" (UID: \"c21f1e83-8b5a-4fee-b51e-3617d90b23f8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l8t2q" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.629810 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-x6k88" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.638788 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5ww6p" event={"ID":"41e7b2c5-c9fe-4000-830c-bf3351dd327f","Type":"ContainerStarted","Data":"8728f043eeb312c7f5535383f18ae611ae4073c338c0e4a3a3b012386fb4992c"} Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.638983 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-lwc4r" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.645101 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-dh4ts" event={"ID":"07dcad69-d3a4-40e2-a4d2-e83eb74631d7","Type":"ContainerStarted","Data":"447fc1c1113a5476eb630067b930ed8e4a10124a0725b6b5336e7a41b5656d7f"} Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.645159 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-dh4ts" event={"ID":"07dcad69-d3a4-40e2-a4d2-e83eb74631d7","Type":"ContainerStarted","Data":"39033e92db7ecb2c5b05beaf5ee2708f4d3c2ab5056926b6a4f8480e8194cb33"} Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.659571 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-fp6zg" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.683404 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-78q2b" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.687427 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:30 crc kubenswrapper[5003]: E1206 15:34:30.694668 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:31.194651718 +0000 UTC m=+149.728006099 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.695989 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2qwm4" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.725198 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dc7def17-767d-47b6-ad2f-4cf73e84ea5e-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-854zg\" (UID: \"dc7def17-767d-47b6-ad2f-4cf73e84ea5e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-854zg" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.733443 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b9g6f\" (UniqueName: \"kubernetes.io/projected/4e8d2519-5570-4ac9-8968-3015f3658ef0-kube-api-access-b9g6f\") pod \"csi-hostpathplugin-bk7j8\" (UID: \"4e8d2519-5570-4ac9-8968-3015f3658ef0\") " pod="hostpath-provisioner/csi-hostpathplugin-bk7j8" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.736132 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e6995aa9-a5fd-4994-bbc0-a6ed0b630fa1-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-8fpj2\" (UID: \"e6995aa9-a5fd-4994-bbc0-a6ed0b630fa1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8fpj2" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.736842 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmczj\" (UniqueName: \"kubernetes.io/projected/37f5fb4c-d8e0-421a-a921-a88e7a934b3a-kube-api-access-nmczj\") pod \"collect-profiles-29417250-pmq24\" (UID: \"37f5fb4c-d8e0-421a-a921-a88e7a934b3a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29417250-pmq24" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.776924 5003 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-g9t4q container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.776979 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g9t4q" podUID="9990e306-5ae4-467e-9cc4-0225f9c05fc7" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.786213 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29417250-pmq24" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.788074 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.788198 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bgnhg\" (UniqueName: \"kubernetes.io/projected/0fca25a0-30bc-4906-8557-552531236ee4-kube-api-access-bgnhg\") pod \"package-server-manager-789f6589d5-dc5hk\" (UID: \"0fca25a0-30bc-4906-8557-552531236ee4\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dc5hk" Dec 06 15:34:30 crc kubenswrapper[5003]: E1206 15:34:30.788661 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:31.288645798 +0000 UTC m=+149.822000179 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.846557 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xvg94" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.858132 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l8t2q" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.866149 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-bk7j8" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.887756 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-854zg" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.899451 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:30 crc kubenswrapper[5003]: E1206 15:34:30.899847 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:31.39983232 +0000 UTC m=+149.933186711 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.901667 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8fpj2" Dec 06 15:34:30 crc kubenswrapper[5003]: I1206 15:34:30.920828 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dc5hk" Dec 06 15:34:31 crc kubenswrapper[5003]: I1206 15:34:31.001027 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:34:31 crc kubenswrapper[5003]: E1206 15:34:31.001738 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:31.501719251 +0000 UTC m=+150.035073632 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:31 crc kubenswrapper[5003]: I1206 15:34:31.021147 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-drh5n"] Dec 06 15:34:31 crc kubenswrapper[5003]: I1206 15:34:31.069090 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g9t4q" podStartSLOduration=125.069073647 podStartE2EDuration="2m5.069073647s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:31.067309481 +0000 UTC m=+149.600663872" watchObservedRunningTime="2025-12-06 15:34:31.069073647 +0000 UTC m=+149.602428028" Dec 06 15:34:31 crc kubenswrapper[5003]: I1206 15:34:31.101620 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-npbcn"] Dec 06 15:34:31 crc kubenswrapper[5003]: I1206 15:34:31.103579 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:31 crc kubenswrapper[5003]: E1206 15:34:31.103854 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:31.60384327 +0000 UTC m=+150.137197651 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:31 crc kubenswrapper[5003]: I1206 15:34:31.206030 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:34:31 crc kubenswrapper[5003]: E1206 15:34:31.206417 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:31.706400879 +0000 UTC m=+150.239755260 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:31 crc kubenswrapper[5003]: I1206 15:34:31.307467 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:31 crc kubenswrapper[5003]: E1206 15:34:31.308100 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:31.808087257 +0000 UTC m=+150.341441638 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:31 crc kubenswrapper[5003]: I1206 15:34:31.412075 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:34:31 crc kubenswrapper[5003]: E1206 15:34:31.412450 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:31.912431624 +0000 UTC m=+150.445786005 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:31 crc kubenswrapper[5003]: I1206 15:34:31.440868 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-78q2b" podStartSLOduration=125.440849726 podStartE2EDuration="2m5.440849726s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:31.440312212 +0000 UTC m=+149.973666593" watchObservedRunningTime="2025-12-06 15:34:31.440849726 +0000 UTC m=+149.974204107" Dec 06 15:34:31 crc kubenswrapper[5003]: I1206 15:34:31.516158 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:31 crc kubenswrapper[5003]: E1206 15:34:31.516577 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:32.016561626 +0000 UTC m=+150.549916007 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:31 crc kubenswrapper[5003]: I1206 15:34:31.607343 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xzdsc"] Dec 06 15:34:31 crc kubenswrapper[5003]: I1206 15:34:31.617010 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:34:31 crc kubenswrapper[5003]: E1206 15:34:31.617175 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:32.117149344 +0000 UTC m=+150.650503725 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:31 crc kubenswrapper[5003]: I1206 15:34:31.617287 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:31 crc kubenswrapper[5003]: E1206 15:34:31.617692 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:32.117677247 +0000 UTC m=+150.651031638 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:31 crc kubenswrapper[5003]: I1206 15:34:31.700796 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5ww6p" event={"ID":"41e7b2c5-c9fe-4000-830c-bf3351dd327f","Type":"ContainerStarted","Data":"249af49ec8444376055e76257926152cd5ca464e0d26fee6f629e45b2603f4f8"} Dec 06 15:34:31 crc kubenswrapper[5003]: I1206 15:34:31.717970 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:34:31 crc kubenswrapper[5003]: E1206 15:34:31.718324 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:32.218308145 +0000 UTC m=+150.751662526 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:31 crc kubenswrapper[5003]: I1206 15:34:31.731233 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-drh5n" event={"ID":"cf6c1084-3ac6-4ac5-a15d-7e85f6cf75f5","Type":"ContainerStarted","Data":"c270ade960d0793d13976843e4f46a95055d8b1307551d59bc8076cbc18eb921"} Dec 06 15:34:31 crc kubenswrapper[5003]: I1206 15:34:31.731275 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-ftnnx" event={"ID":"c36f84f6-f4ce-40bd-b151-211f4face9e1","Type":"ContainerStarted","Data":"ba0c03a5d4f5b6e163f19a6e89877d7e48dfd1fe7c1277785d262977e02cc5b7"} Dec 06 15:34:31 crc kubenswrapper[5003]: I1206 15:34:31.733037 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-npbcn" event={"ID":"cee02b0a-ce4b-452e-aa00-48c7823c13d0","Type":"ContainerStarted","Data":"6a80d46599fb552b95e94bc51705bf0a488f3fe799f64daa6271bf772ee57eaf"} Dec 06 15:34:31 crc kubenswrapper[5003]: I1206 15:34:31.736164 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-4qjqw" event={"ID":"b4d4786f-591a-43fb-afe1-04c8daa257a7","Type":"ContainerStarted","Data":"7d8baa252c0b50752b896400cbd78c65574965f9f9dfb4f696eb31d04385c6c2"} Dec 06 15:34:31 crc kubenswrapper[5003]: I1206 15:34:31.783174 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" 
event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"5359b541a7f4b4eec0897c83b3ea16c63c1399fdce541dbfa2128b1ee6eb1486"} Dec 06 15:34:31 crc kubenswrapper[5003]: I1206 15:34:31.815812 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"1bfbb2db0cda56567b0676b101c62c5f1ecbf376a2d6760de444c8bb4423e4a6"} Dec 06 15:34:31 crc kubenswrapper[5003]: I1206 15:34:31.820233 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:31 crc kubenswrapper[5003]: E1206 15:34:31.820606 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:32.320592839 +0000 UTC m=+150.853947220 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:31 crc kubenswrapper[5003]: I1206 15:34:31.923999 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:34:31 crc kubenswrapper[5003]: E1206 15:34:31.944442 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:32.444420079 +0000 UTC m=+150.977774460 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:31 crc kubenswrapper[5003]: I1206 15:34:31.944626 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:31 crc kubenswrapper[5003]: E1206 15:34:31.945552 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:32.445544959 +0000 UTC m=+150.978899340 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:31 crc kubenswrapper[5003]: I1206 15:34:31.972053 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-tsg4h"] Dec 06 15:34:32 crc kubenswrapper[5003]: I1206 15:34:32.056168 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:34:32 crc kubenswrapper[5003]: E1206 15:34:32.056464 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:32.556438623 +0000 UTC m=+151.089793004 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:32 crc kubenswrapper[5003]: I1206 15:34:32.164894 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:32 crc kubenswrapper[5003]: E1206 15:34:32.165247 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:32.66523145 +0000 UTC m=+151.198585831 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:32 crc kubenswrapper[5003]: I1206 15:34:32.270719 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:34:32 crc kubenswrapper[5003]: E1206 15:34:32.271112 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:32.771092768 +0000 UTC m=+151.304447149 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 06 15:34:32 crc kubenswrapper[5003]: I1206 15:34:32.372001 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh"
Dec 06 15:34:32 crc kubenswrapper[5003]: E1206 15:34:32.372347 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:32.872335393 +0000 UTC m=+151.405689774 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 06 15:34:32 crc kubenswrapper[5003]: I1206 15:34:32.447661 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-jx64p" podStartSLOduration=127.447638062 podStartE2EDuration="2m7.447638062s" podCreationTimestamp="2025-12-06 15:32:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:32.429613809 +0000 UTC m=+150.962968190" watchObservedRunningTime="2025-12-06 15:34:32.447638062 +0000 UTC m=+150.980992443"
Dec 06 15:34:32 crc kubenswrapper[5003]: I1206 15:34:32.473015 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 06 15:34:32 crc kubenswrapper[5003]: I1206 15:34:32.473032 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-dh4ts" podStartSLOduration=126.473018762 podStartE2EDuration="2m6.473018762s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:32.452127593 +0000 UTC m=+150.985481974" watchObservedRunningTime="2025-12-06 15:34:32.473018762 +0000 UTC m=+151.006373173"
Dec 06 15:34:32 crc kubenswrapper[5003]: E1206 15:34:32.473264 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:32.973252699 +0000 UTC m=+151.506607080 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 06 15:34:32 crc kubenswrapper[5003]: I1206 15:34:32.577409 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh"
Dec 06 15:34:32 crc kubenswrapper[5003]: E1206 15:34:32.577803 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:33.077788172 +0000 UTC m=+151.611142553 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 06 15:34:32 crc kubenswrapper[5003]: I1206 15:34:32.648203 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g9t4q"
Dec 06 15:34:32 crc kubenswrapper[5003]: I1206 15:34:32.684372 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 06 15:34:32 crc kubenswrapper[5003]: E1206 15:34:32.684995 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:33.184976926 +0000 UTC m=+151.718331307 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 06 15:34:32 crc kubenswrapper[5003]: I1206 15:34:32.786052 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh"
Dec 06 15:34:32 crc kubenswrapper[5003]: E1206 15:34:32.786456 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:33.286440246 +0000 UTC m=+151.819794627 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 06 15:34:32 crc kubenswrapper[5003]: I1206 15:34:32.864669 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5" event={"ID":"58fe25e3-98d7-4725-841b-4bcd2e2f628f","Type":"ContainerStarted","Data":"b1030a60ed790ac81465f2ea2c8053c26420b9d24581fed779ce5f196367aab0"}
Dec 06 15:34:32 crc kubenswrapper[5003]: I1206 15:34:32.875333 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"1515094bc5d8207f41508cb473cb1f9914da18fa69473fdaed194e2dd2e2ae94"}
Dec 06 15:34:32 crc kubenswrapper[5003]: I1206 15:34:32.883891 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-94w25"]
Dec 06 15:34:32 crc kubenswrapper[5003]: I1206 15:34:32.888360 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 06 15:34:32 crc kubenswrapper[5003]: E1206 15:34:32.889969 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:33.38988842 +0000 UTC m=+151.923242801 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 06 15:34:32 crc kubenswrapper[5003]: I1206 15:34:32.890234 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh"
Dec 06 15:34:32 crc kubenswrapper[5003]: E1206 15:34:32.890630 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:33.39061189 +0000 UTC m=+151.923966271 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 06 15:34:32 crc kubenswrapper[5003]: I1206 15:34:32.891153 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" event={"ID":"0fc961e1-eee3-4fd5-ac99-56b85320740b","Type":"ContainerStarted","Data":"559104660f3ef6ce4ea778f769f0da4ab45cc815885921946ac532b6d1abb9c4"}
Dec 06 15:34:32 crc kubenswrapper[5003]: I1206 15:34:32.901006 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-mvtxr"]
Dec 06 15:34:32 crc kubenswrapper[5003]: I1206 15:34:32.918522 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jwrlg"]
Dec 06 15:34:32 crc kubenswrapper[5003]: I1206 15:34:32.925888 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-92jlq"]
Dec 06 15:34:32 crc kubenswrapper[5003]: I1206 15:34:32.927690 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-4qjqw" event={"ID":"b4d4786f-591a-43fb-afe1-04c8daa257a7","Type":"ContainerStarted","Data":"3efe75bfec97530bb697c5cb29e636c4b72bd5a07fd0474fadcb00d23a33d051"}
Dec 06 15:34:32 crc kubenswrapper[5003]: I1206 15:34:32.933197 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"982d1a73cc9618e033bd09656822cb1b8b3c3494c93b09cd2266625c13063c2e"}
Dec 06 15:34:32 crc kubenswrapper[5003]: I1206 15:34:32.935574 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5" podStartSLOduration=126.935553686 podStartE2EDuration="2m6.935553686s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:32.900868875 +0000 UTC m=+151.434223256" watchObservedRunningTime="2025-12-06 15:34:32.935553686 +0000 UTC m=+151.468908057"
Dec 06 15:34:32 crc kubenswrapper[5003]: I1206 15:34:32.940111 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xzdsc" event={"ID":"343f69ef-b8b8-459c-95d1-5234344b45e0","Type":"ContainerStarted","Data":"f876c0cb7f71a384b68e7f87346e2d59f8ee04136a93f96187a83098dca6d34e"}
Dec 06 15:34:32 crc kubenswrapper[5003]: I1206 15:34:32.940172 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xzdsc" event={"ID":"343f69ef-b8b8-459c-95d1-5234344b45e0","Type":"ContainerStarted","Data":"a37a538b04c01bbd7c638c73d815a5da978a3e637fc5b628f79ba120832ab867"}
Dec 06 15:34:32 crc kubenswrapper[5003]: I1206 15:34:32.941632 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"879d83a0f90b0badc45df834c76d738f8e5dbb9473f2b97387c97e07ff73f7e3"}
Dec 06 15:34:32 crc kubenswrapper[5003]: I1206 15:34:32.942758 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 06 15:34:32 crc kubenswrapper[5003]: I1206 15:34:32.945805 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5ww6p" event={"ID":"41e7b2c5-c9fe-4000-830c-bf3351dd327f","Type":"ContainerStarted","Data":"15be7c4dd4ea36b42358251f6e06c6de7447e2bb6c893458b478c9d4452e2261"}
Dec 06 15:34:32 crc kubenswrapper[5003]: I1206 15:34:32.952341 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-drh5n" event={"ID":"cf6c1084-3ac6-4ac5-a15d-7e85f6cf75f5","Type":"ContainerStarted","Data":"4f3d7a7e697c53c96ea3231821a3cd6c0078a4a5dcbb614fdf7b69c7eb1a79f4"}
Dec 06 15:34:32 crc kubenswrapper[5003]: I1206 15:34:32.952796 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-drh5n"
Dec 06 15:34:32 crc kubenswrapper[5003]: I1206 15:34:32.953692 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-4qjqw" podStartSLOduration=126.953679361 podStartE2EDuration="2m6.953679361s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:32.949310313 +0000 UTC m=+151.482664714" watchObservedRunningTime="2025-12-06 15:34:32.953679361 +0000 UTC m=+151.487033742"
Dec 06 15:34:32 crc kubenswrapper[5003]: I1206 15:34:32.954426 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-ftnnx" event={"ID":"c36f84f6-f4ce-40bd-b151-211f4face9e1","Type":"ContainerStarted","Data":"838d14799bb9879e8f1df912f099974880f5d4a546d37a46fbb3f4920d34ee4b"}
Dec 06 15:34:32 crc kubenswrapper[5003]: I1206 15:34:32.956089 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-npbcn" event={"ID":"cee02b0a-ce4b-452e-aa00-48c7823c13d0","Type":"ContainerStarted","Data":"595da1f7258a2da949270883c2ba5cf1f8b2e1d8280abe45423bb227f00635e4"}
Dec 06 15:34:32 crc kubenswrapper[5003]: I1206 15:34:32.985695 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5ww6p" podStartSLOduration=127.985677089 podStartE2EDuration="2m7.985677089s" podCreationTimestamp="2025-12-06 15:32:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:32.985246197 +0000 UTC m=+151.518600598" watchObservedRunningTime="2025-12-06 15:34:32.985677089 +0000 UTC m=+151.519031470"
Dec 06 15:34:32 crc kubenswrapper[5003]: I1206 15:34:32.991821 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 06 15:34:32 crc kubenswrapper[5003]: E1206 15:34:32.991985 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:33.491956338 +0000 UTC m=+152.025310719 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 06 15:34:32 crc kubenswrapper[5003]: I1206 15:34:32.992079 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh"
Dec 06 15:34:32 crc kubenswrapper[5003]: E1206 15:34:32.992917 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:33.492901123 +0000 UTC m=+152.026255504 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.037007 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xzdsc" podStartSLOduration=127.036986545 podStartE2EDuration="2m7.036986545s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:33.036864282 +0000 UTC m=+151.570218673" watchObservedRunningTime="2025-12-06 15:34:33.036986545 +0000 UTC m=+151.570340926"
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.056616 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-npbcn" podStartSLOduration=127.05659760099999 podStartE2EDuration="2m7.056597601s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:33.054418282 +0000 UTC m=+151.587772683" watchObservedRunningTime="2025-12-06 15:34:33.056597601 +0000 UTC m=+151.589951982"
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.074752 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-ftnnx" podStartSLOduration=6.074735297 podStartE2EDuration="6.074735297s" podCreationTimestamp="2025-12-06 15:34:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:33.073527785 +0000 UTC m=+151.606882196" watchObservedRunningTime="2025-12-06 15:34:33.074735297 +0000 UTC m=+151.608089678"
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.094268 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 06 15:34:33 crc kubenswrapper[5003]: E1206 15:34:33.094638 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:33.59461589 +0000 UTC m=+152.127970281 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.095180 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh"
Dec 06 15:34:33 crc kubenswrapper[5003]: E1206 15:34:33.096601 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:33.596587053 +0000 UTC m=+152.129941434 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.105162 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-drh5n" podStartSLOduration=127.105146482 podStartE2EDuration="2m7.105146482s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:33.104790993 +0000 UTC m=+151.638145384" watchObservedRunningTime="2025-12-06 15:34:33.105146482 +0000 UTC m=+151.638500863"
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.196374 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 06 15:34:33 crc kubenswrapper[5003]: E1206 15:34:33.196735 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:33.696710168 +0000 UTC m=+152.230064549 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 06 15:34:33 crc kubenswrapper[5003]: W1206 15:34:33.243327 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd7c10f1a_b1ca_4c58_882a_f5d834b31b5a.slice/crio-b42f6ffdc49783f877fb7134b7998c92039b2be2fadfd24b6f13d26e0e9ec498 WatchSource:0}: Error finding container b42f6ffdc49783f877fb7134b7998c92039b2be2fadfd24b6f13d26e0e9ec498: Status 404 returned error can't find the container with id b42f6ffdc49783f877fb7134b7998c92039b2be2fadfd24b6f13d26e0e9ec498
Dec 06 15:34:33 crc kubenswrapper[5003]: W1206 15:34:33.251189 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod09de6d60_7a17_4222_b6ea_457b9e58a937.slice/crio-78ebb3390c41861e847ad61bbab7ee2b4e7fb7b6189a6375ed18a633b3074cb9 WatchSource:0}: Error finding container 78ebb3390c41861e847ad61bbab7ee2b4e7fb7b6189a6375ed18a633b3074cb9: Status 404 returned error can't find the container with id 78ebb3390c41861e847ad61bbab7ee2b4e7fb7b6189a6375ed18a633b3074cb9
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.264682 5003 patch_prober.go:28] interesting pod/console-operator-58897d9998-drh5n container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.14:8443/readyz\": dial tcp 10.217.0.14:8443: connect: connection refused" start-of-body=
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.264744 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-drh5n" podUID="cf6c1084-3ac6-4ac5-a15d-7e85f6cf75f5" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.14:8443/readyz\": dial tcp 10.217.0.14:8443: connect: connection refused"
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.298573 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh"
Dec 06 15:34:33 crc kubenswrapper[5003]: E1206 15:34:33.299205 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:33.799190145 +0000 UTC m=+152.332544526 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.400124 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 06 15:34:33 crc kubenswrapper[5003]: E1206 15:34:33.400623 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:33.900605205 +0000 UTC m=+152.433959586 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.444017 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-jx64p"
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.444904 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-jx64p"
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.454199 5003 patch_prober.go:28] interesting pod/apiserver-76f77b778f-jx64p container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok
Dec 06 15:34:33 crc kubenswrapper[5003]: [+]log ok
Dec 06 15:34:33 crc kubenswrapper[5003]: [+]etcd ok
Dec 06 15:34:33 crc kubenswrapper[5003]: [+]poststarthook/start-apiserver-admission-initializer ok
Dec 06 15:34:33 crc kubenswrapper[5003]: [+]poststarthook/generic-apiserver-start-informers ok
Dec 06 15:34:33 crc kubenswrapper[5003]: [+]poststarthook/max-in-flight-filter ok
Dec 06 15:34:33 crc kubenswrapper[5003]: [+]poststarthook/storage-object-count-tracker-hook ok
Dec 06 15:34:33 crc kubenswrapper[5003]: [+]poststarthook/image.openshift.io-apiserver-caches ok
Dec 06 15:34:33 crc kubenswrapper[5003]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld
Dec 06 15:34:33 crc kubenswrapper[5003]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld
Dec 06 15:34:33 crc kubenswrapper[5003]: [+]poststarthook/project.openshift.io-projectcache ok
Dec 06 15:34:33 crc kubenswrapper[5003]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok
Dec 06 15:34:33 crc kubenswrapper[5003]: [+]poststarthook/openshift.io-startinformers ok
Dec 06 15:34:33 crc kubenswrapper[5003]: [+]poststarthook/openshift.io-restmapperupdater ok
Dec 06 15:34:33 crc kubenswrapper[5003]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok
Dec 06 15:34:33 crc kubenswrapper[5003]: livez check failed
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.454259 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-jx64p" podUID="9280dab9-204d-4112-98be-c6809da2ad4e" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.501974 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh"
Dec 06 15:34:33 crc kubenswrapper[5003]: E1206 15:34:33.502285 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:34.002273541 +0000 UTC m=+152.535627922 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.516387 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5"
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.516686 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5"
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.547367 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-4qjqw"
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.550792 5003 patch_prober.go:28] interesting pod/router-default-5444994796-4qjqw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 06 15:34:33 crc kubenswrapper[5003]: [-]has-synced failed: reason withheld
Dec 06 15:34:33 crc kubenswrapper[5003]: [+]process-running ok
Dec 06 15:34:33 crc kubenswrapper[5003]: healthz check failed
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.550826 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-4qjqw" podUID="b4d4786f-591a-43fb-afe1-04c8daa257a7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.602884 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 06 15:34:33 crc kubenswrapper[5003]: E1206 15:34:33.603725 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:34.10369339 +0000 UTC m=+152.637047761 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.646233 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5"
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.703350 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-hppvr"]
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.706681 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh"
Dec 06 15:34:33 crc kubenswrapper[5003]: E1206 15:34:33.707023 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:34.207006841 +0000 UTC m=+152.740361222 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.808835 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 06 15:34:33 crc kubenswrapper[5003]: E1206 15:34:33.809250 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:34.309221511 +0000 UTC m=+152.842575922 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.820734 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-bvkqz"]
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.824882 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-wx2w9"]
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.861771 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-t2c5q"]
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.864198 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rqfnv"]
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.875400 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-rrdrz"]
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.890478 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-xvg94"]
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.900773 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vr8wv"]
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.911621 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pt2wd"]
Dec 06 15:34:33 crc kubenswrapper[5003]: W1206 15:34:33.913206 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode3de71c3_340a_4492_8235_043eeb8bc509.slice/crio-c9bfec89c38819eb5682f1f6e5ff5dbb72490ec86888068edb668d5df94c9ad7 WatchSource:0}: Error finding container c9bfec89c38819eb5682f1f6e5ff5dbb72490ec86888068edb668d5df94c9ad7: Status 404 returned error can't find the container with id c9bfec89c38819eb5682f1f6e5ff5dbb72490ec86888068edb668d5df94c9ad7
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.913249 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh"
Dec 06 15:34:33 crc kubenswrapper[5003]: E1206 15:34:33.916205 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:34.41619155 +0000 UTC m=+152.949545931 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 06 15:34:33 crc kubenswrapper[5003]: W1206 15:34:33.931556 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode109ae1a_d737_4150_9e67_728d9d8d32dc.slice/crio-862aa854e20787e82f78160a818eafeff2ec3fb39b486c0f971e5471138bbed7 WatchSource:0}: Error finding container 862aa854e20787e82f78160a818eafeff2ec3fb39b486c0f971e5471138bbed7: Status 404 returned error can't find the container with id 862aa854e20787e82f78160a818eafeff2ec3fb39b486c0f971e5471138bbed7
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.931611 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-xs4nd"]
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.934438 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-7w4jb"]
Dec 06 15:34:33 crc kubenswrapper[5003]: W1206 15:34:33.944861 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod60490c57_c15a_4479_a735_257c6f60f1b0.slice/crio-b4f91979432a6e9255e6a4680e3f1d46631b63b46d29ff61d64c86628456fea1 WatchSource:0}: Error finding container b4f91979432a6e9255e6a4680e3f1d46631b63b46d29ff61d64c86628456fea1: Status 404 returned error can't find the container with id b4f91979432a6e9255e6a4680e3f1d46631b63b46d29ff61d64c86628456fea1
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.954197 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-p7hq2"]
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.957564 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mhpsl"]
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.962387 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8fpj2"]
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.979723 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xvg94" event={"ID":"af89c7bf-ebbc-464e-b11a-c7343acbf887","Type":"ContainerStarted","Data":"634e8ed8103017d1db948724ec31b3caee28e38e49547a410ef25e465c5bc2dc"}
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.985100 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29417250-pmq24"]
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.985913 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7w4jb" event={"ID":"60490c57-c15a-4479-a735-257c6f60f1b0","Type":"ContainerStarted","Data":"b4f91979432a6e9255e6a4680e3f1d46631b63b46d29ff61d64c86628456fea1"}
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.989340 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2qwm4"]
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.989730 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvkqz" event={"ID":"a48bd0ed-4703-46dd-9586-6141cfe7b15e","Type":"ContainerStarted","Data":"d4d9928fc6b255f3918e3a757599cec3bc13a24311c7e7e02557587fd153b9d1"}
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.990911 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pt2wd" event={"ID":"1f3db07f-b32b-46e0-b697-a5140c5021cd","Type":"ContainerStarted","Data":"9f279d660236b8084a0f821f5885d908dffbfff4554648f2c785570ed492ccac"}
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.991153 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l8t2q"]
Dec 06 15:34:33 crc kubenswrapper[5003]: W1206 15:34:33.991890 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc21f1e83_8b5a_4fee_b51e_3617d90b23f8.slice/crio-271f42ee47b45ddf9c44774dd2cab243c6f2e2210f6c254935e34c931ad04c47 WatchSource:0}: Error finding container 271f42ee47b45ddf9c44774dd2cab243c6f2e2210f6c254935e34c931ad04c47: Status 404 returned error can't find the container with id 271f42ee47b45ddf9c44774dd2cab243c6f2e2210f6c254935e34c931ad04c47
Dec 06 15:34:33 crc kubenswrapper[5003]: I1206 15:34:33.992012 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vr8wv" event={"ID":"58dc4be2-81aa-4567-b800-1b77019a7eca","Type":"ContainerStarted","Data":"13ca7f38c830d3ee83dec93d6eccb5029a7e0aef7240fe770339529fd5178797"}
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.005598 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-rjl56"]
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.005746 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rqfnv" event={"ID":"e109ae1a-d737-4150-9e67-728d9d8d32dc","Type":"ContainerStarted","Data":"862aa854e20787e82f78160a818eafeff2ec3fb39b486c0f971e5471138bbed7"}
Dec 06 15:34:34 crc kubenswrapper[5003]: W1206 15:34:34.015947 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9c863b8e_b487_438d_a745_e3f41c2ef92c.slice/crio-427033c29a8715780078983fd7dd8f41860ab62d4872430062bcc37eb7cc6e4f WatchSource:0}: Error finding container 427033c29a8715780078983fd7dd8f41860ab62d4872430062bcc37eb7cc6e4f: Status 404 returned error can't find the container with id 427033c29a8715780078983fd7dd8f41860ab62d4872430062bcc37eb7cc6e4f
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.016347 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 06 15:34:34 crc kubenswrapper[5003]: E1206 15:34:34.016519 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:34.516470088 +0000 UTC m=+153.049824479 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.016786 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh"
Dec 06 15:34:34 crc kubenswrapper[5003]: E1206 15:34:34.017208 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:34.517197048 +0000 UTC m=+153.050551509 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.017646 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-x6k88"]
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.019711 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-854zg"]
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.021401 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-fp6zg"]
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.023049 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dc5hk"]
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.025042 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-92jlq" event={"ID":"09de6d60-7a17-4222-b6ea-457b9e58a937","Type":"ContainerStarted","Data":"8051fabc515c57826f1f68ac51c43aa3fd6a6dd0566889a8bf7fcc0c88e3d2b4"}
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.025134 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-92jlq" event={"ID":"09de6d60-7a17-4222-b6ea-457b9e58a937","Type":"ContainerStarted","Data":"78ebb3390c41861e847ad61bbab7ee2b4e7fb7b6189a6375ed18a633b3074cb9"}
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.029908 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-t2c5q" event={"ID":"78a9da4a-a389-4286-a4cc-d2924052721a","Type":"ContainerStarted","Data":"e9f9b5a133ca7fcf6affec19bd784b182ee84c10c65530e49d0ae785eed81662"}
Dec 06 15:34:34 crc kubenswrapper[5003]: W1206 15:34:34.035618 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfb3e806c_c739_4628_bf9c_8745195fce4c.slice/crio-c9ec615c7079858d25126490834d802c074a1b0506bc1d923821988902ef4236 WatchSource:0}: Error finding container c9ec615c7079858d25126490834d802c074a1b0506bc1d923821988902ef4236: Status 404 returned error can't find the container with id c9ec615c7079858d25126490834d802c074a1b0506bc1d923821988902ef4236
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.036543 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-mvtxr" event={"ID":"d7c10f1a-b1ca-4c58-882a-f5d834b31b5a","Type":"ContainerStarted","Data":"8a83cb9f3b8e8c6ca8f012839c67e2759b6de4048efb76d2e72675e4316ab0e1"}
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.036581 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-mvtxr" event={"ID":"d7c10f1a-b1ca-4c58-882a-f5d834b31b5a","Type":"ContainerStarted","Data":"b42f6ffdc49783f877fb7134b7998c92039b2be2fadfd24b6f13d26e0e9ec498"}
Dec 06 15:34:34 crc kubenswrapper[5003]: W1206 15:34:34.040934 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0fca25a0_30bc_4906_8557_552531236ee4.slice/crio-e185faae5d96a034c6f1b06b6fff0b6ced86b0b54f21f579766017c6637af16b WatchSource:0}: Error finding container e185faae5d96a034c6f1b06b6fff0b6ced86b0b54f21f579766017c6637af16b: Status 404 returned error can't find the container with id e185faae5d96a034c6f1b06b6fff0b6ced86b0b54f21f579766017c6637af16b
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.041110 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-rrdrz" event={"ID":"e3de71c3-340a-4492-8235-043eeb8bc509","Type":"ContainerStarted","Data":"c9bfec89c38819eb5682f1f6e5ff5dbb72490ec86888068edb668d5df94c9ad7"}
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.044353 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-94w25" event={"ID":"61f3ad0c-2191-4ea5-96ed-763ca80fbcba","Type":"ContainerStarted","Data":"f9ce9203021188de0fce1b254e67baaef10f9c2b7a5683e40a5fe51713229f85"}
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.044564 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-94w25" event={"ID":"61f3ad0c-2191-4ea5-96ed-763ca80fbcba","Type":"ContainerStarted","Data":"010e1508112a7d871ebce2aaf5817d6a75ac508f067b1cc713e6c7193d27f738"}
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.044646 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-94w25" event={"ID":"61f3ad0c-2191-4ea5-96ed-763ca80fbcba","Type":"ContainerStarted","Data":"0705d43508410a737be67cd478c4f0e3f1adebbed148a3c94d2b62b799f74976"}
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.052413 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"ee75ad2a7a0302ad394917ec4ff5697230f60ac0b4caa547ba80d9826ec24bb7"}
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.059135 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-92jlq" podStartSLOduration=129.059114232 podStartE2EDuration="2m9.059114232s" podCreationTimestamp="2025-12-06 15:32:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:34.041416637 +0000 UTC m=+152.574771018" watchObservedRunningTime="2025-12-06 15:34:34.059114232 +0000 UTC m=+152.592468623"
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.063690 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-hppvr" event={"ID":"e7652f19-206e-401f-8424-e2af50465b27","Type":"ContainerStarted","Data":"1208ae693b934b4407556e43995159c94fcb5617e2345d31ed5f790f791d0768"}
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.063745 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-hppvr" event={"ID":"e7652f19-206e-401f-8424-e2af50465b27","Type":"ContainerStarted","Data":"aa18c620041a603feeb4af59dca7b7d28cc95b77583bcc34f48a40d5bf7967d5"}
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.063781 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-gqgvg"]
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.064138 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-mvtxr" podStartSLOduration=128.064121106 podStartE2EDuration="2m8.064121106s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:34.059826531 +0000 UTC m=+152.593180922" watchObservedRunningTime="2025-12-06 15:34:34.064121106 +0000 UTC m=+152.597475487"
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.078006 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jwrlg" event={"ID":"4f00accf-ea52-4f16-9749-4af762d99a60","Type":"ContainerStarted","Data":"735f88ac3989871ebe3d260b6941bfcba4375aaffbd6f38650d95574d5bb26e2"}
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.078054 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jwrlg" event={"ID":"4f00accf-ea52-4f16-9749-4af762d99a60","Type":"ContainerStarted","Data":"6b9f20655f4e620e0ae0c15179095b63eb8e97178449a91dc5b7376acb39d8c1"}
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.078064 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jwrlg" event={"ID":"4f00accf-ea52-4f16-9749-4af762d99a60","Type":"ContainerStarted","Data":"0fa05700a300b0c819f5a922bf5da48c3802b0cd68f7cdfb42bf700a860c2ee3"}
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.080783 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd"
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.088854 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-94w25" podStartSLOduration=128.088842439 podStartE2EDuration="2m8.088842439s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:34.084093222 +0000 UTC m=+152.617447623" watchObservedRunningTime="2025-12-06 15:34:34.088842439 +0000 UTC m=+152.622196820"
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.111639 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-lwc4r"]
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.117446 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 06 15:34:34 crc kubenswrapper[5003]: E1206 15:34:34.117723 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:34.617703133 +0000 UTC m=+153.151057514 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.117862 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh"
Dec 06 15:34:34 crc kubenswrapper[5003]: E1206 15:34:34.119024 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:34.619013168 +0000 UTC m=+153.152367549 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 06 15:34:34 crc kubenswrapper[5003]: W1206 15:34:34.122759 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd32d51fd_e8e1_4bc7_a1b4_be5c851a8651.slice/crio-98a981ace0963cdca84e941d4a3b0bd13bb6f09803e68baeb814879f3d845b56 WatchSource:0}: Error finding container 98a981ace0963cdca84e941d4a3b0bd13bb6f09803e68baeb814879f3d845b56: Status 404 returned error can't find the container with id 98a981ace0963cdca84e941d4a3b0bd13bb6f09803e68baeb814879f3d845b56
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.122775 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-wx2w9" event={"ID":"de472d67-6d24-44c5-becf-1cd20d390264","Type":"ContainerStarted","Data":"c1d6cac523cd145e7fbd87623905199e5019aae75e08c209f1b06b745b0bf2fe"}
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.138695 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" event={"ID":"0fc961e1-eee3-4fd5-ac99-56b85320740b","Type":"ContainerStarted","Data":"fc154be70cccef24292ecbfe62be9985bf7018b340d737f07c6a0c02e9821407"}
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.139156 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h"
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.142600 5003 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-tsg4h container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.11:6443/healthz\": dial tcp 10.217.0.11:6443: connect: connection refused" start-of-body=
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.142641 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" podUID="0fc961e1-eee3-4fd5-ac99-56b85320740b" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.11:6443/healthz\": dial tcp 10.217.0.11:6443: connect: connection refused"
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.178591 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jwrlg" podStartSLOduration=128.178570305 podStartE2EDuration="2m8.178570305s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:34.17800953 +0000 UTC m=+152.711363921" watchObservedRunningTime="2025-12-06 15:34:34.178570305 +0000 UTC m=+152.711924686"
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.185617 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-xs4nd" event={"ID":"495babf4-9201-4523-8a21-44e001d4f4c1","Type":"ContainerStarted","Data":"2822fb82a8ae96cdb8bd7a18433f18f3d5695bdc0e3eb2ecdaa0aeb148b3c4cb"}
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.193947 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-48sc5"
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.202781 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-bk7j8"]
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.219691 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 06 15:34:34 crc kubenswrapper[5003]: E1206 15:34:34.221095 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:34.721074824 +0000 UTC m=+153.254429215 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.253915 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-drh5n"
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.257840 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-hppvr" podStartSLOduration=129.25781916 podStartE2EDuration="2m9.25781916s" podCreationTimestamp="2025-12-06 15:32:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:34.215285389 +0000 UTC m=+152.748639790" watchObservedRunningTime="2025-12-06 15:34:34.25781916 +0000 UTC m=+152.791173551"
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.262216 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" podStartSLOduration=129.262185837 podStartE2EDuration="2m9.262185837s" podCreationTimestamp="2025-12-06 15:32:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:34.257264015 +0000 UTC m=+152.790618416" watchObservedRunningTime="2025-12-06 15:34:34.262185837 +0000 UTC m=+152.795540218"
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.322747 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh"
Dec 06 15:34:34 crc kubenswrapper[5003]: E1206 15:34:34.332965 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:34.832948074 +0000 UTC m=+153.366302465 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.424856 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 06 15:34:34 crc kubenswrapper[5003]: E1206 15:34:34.425228 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:34.925208868 +0000 UTC m=+153.458563249 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.528220 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh"
Dec 06 15:34:34 crc kubenswrapper[5003]: E1206 15:34:34.528548 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:35.028534099 +0000 UTC m=+153.561888480 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.584400 5003 patch_prober.go:28] interesting pod/router-default-5444994796-4qjqw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 06 15:34:34 crc kubenswrapper[5003]: [-]has-synced failed: reason withheld Dec 06 15:34:34 crc kubenswrapper[5003]: [+]process-running ok Dec 06 15:34:34 crc kubenswrapper[5003]: healthz check failed Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.584454 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-4qjqw" podUID="b4d4786f-591a-43fb-afe1-04c8daa257a7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.630636 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:34:34 crc kubenswrapper[5003]: E1206 15:34:34.631019 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:35.130984756 +0000 UTC m=+153.664339147 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.631109 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:34 crc kubenswrapper[5003]: E1206 15:34:34.631500 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:35.131469539 +0000 UTC m=+153.664823920 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.731758 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:34:34 crc kubenswrapper[5003]: E1206 15:34:34.732716 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:35.232691763 +0000 UTC m=+153.766046144 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.833597 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:34 crc kubenswrapper[5003]: E1206 15:34:34.833920 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:35.333909907 +0000 UTC m=+153.867264288 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.935021 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:34:34 crc kubenswrapper[5003]: E1206 15:34:34.935909 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:35.435234373 +0000 UTC m=+153.968588754 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:34 crc kubenswrapper[5003]: I1206 15:34:34.939087 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:34 crc kubenswrapper[5003]: E1206 15:34:34.939724 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:35.439709114 +0000 UTC m=+153.973063495 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.050788 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:34:35 crc kubenswrapper[5003]: E1206 15:34:35.051756 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:35.551736608 +0000 UTC m=+154.085090989 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.155808 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:35 crc kubenswrapper[5003]: E1206 15:34:35.156153 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:35.656140417 +0000 UTC m=+154.189494798 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.198362 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rqfnv" event={"ID":"e109ae1a-d737-4150-9e67-728d9d8d32dc","Type":"ContainerStarted","Data":"7fe1864bed942ee73ae13aabab36eb6259f675343b3eeee72d85052ffaf9de4a"} Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.200529 5003 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-rqfnv container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.24:8443/healthz\": dial tcp 10.217.0.24:8443: connect: connection refused" start-of-body= Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.200569 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rqfnv" podUID="e109ae1a-d737-4150-9e67-728d9d8d32dc" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.24:8443/healthz\": dial tcp 10.217.0.24:8443: connect: connection refused" Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.200823 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rqfnv" Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.201865 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pt2wd" event={"ID":"1f3db07f-b32b-46e0-b697-a5140c5021cd","Type":"ContainerStarted","Data":"6e9f6929e5895a1ab882a794bf50b5c92f20cc8ffc04857eb676893220abc7ee"} Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.203319 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pt2wd" Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.203419 5003 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-pt2wd container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.40:8443/healthz\": dial tcp 10.217.0.40:8443: connect: connection refused" start-of-body= Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.203439 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pt2wd" podUID="1f3db07f-b32b-46e0-b697-a5140c5021cd" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.40:8443/healthz\": dial tcp 10.217.0.40:8443: connect: connection refused" Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.208985 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-rrdrz" event={"ID":"e3de71c3-340a-4492-8235-043eeb8bc509","Type":"ContainerStarted","Data":"011bb28cd44e2f0e501f1df44b468a6a55a2dde0604eb3f73ec20a78ddb01bdc"} Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.210722 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="hostpath-provisioner/csi-hostpathplugin-bk7j8" event={"ID":"4e8d2519-5570-4ac9-8968-3015f3658ef0","Type":"ContainerStarted","Data":"50a97b3363f9cab4a45ee57e88b7c6fa3e34c3be62db8c273dac6a4d862ec696"} Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.211881 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-t2c5q" event={"ID":"78a9da4a-a389-4286-a4cc-d2924052721a","Type":"ContainerStarted","Data":"c007de54935bd4c8da1fae57237099bae2f90acef8b666f29e74fa7eec282ab6"} Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.213761 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-rjl56" event={"ID":"460fedd0-6ec4-4ef5-91ca-cc62ae21ebc4","Type":"ContainerStarted","Data":"7460d58578ecda57fe7e37260bafe3f0d5a13ffb05eb50b1726926e6c0df8799"} Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.214696 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-854zg" event={"ID":"dc7def17-767d-47b6-ad2f-4cf73e84ea5e","Type":"ContainerStarted","Data":"54e9f38fa24cb09ddbdd390fa2a7a8315e327693857eec85441bca6a0ef64868"} Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.215498 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-gqgvg" event={"ID":"d32d51fd-e8e1-4bc7-a1b4-be5c851a8651","Type":"ContainerStarted","Data":"98a981ace0963cdca84e941d4a3b0bd13bb6f09803e68baeb814879f3d845b56"} Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.216716 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dc5hk" event={"ID":"0fca25a0-30bc-4906-8557-552531236ee4","Type":"ContainerStarted","Data":"4bb76dff5b7fd6f2239ca01e00d104cb7fbb5ec27a5edf8fc58ddc4f1878019a"} Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.216738 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dc5hk" event={"ID":"0fca25a0-30bc-4906-8557-552531236ee4","Type":"ContainerStarted","Data":"e185faae5d96a034c6f1b06b6fff0b6ced86b0b54f21f579766017c6637af16b"} Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.217694 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-p7hq2" event={"ID":"150d06be-be0f-4425-a584-760d19d009b5","Type":"ContainerStarted","Data":"3e29bf7c6f59c4c5a9497d13988ee01f820cd225e2989b4546f8e2197465d254"} Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.218651 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8fpj2" event={"ID":"e6995aa9-a5fd-4994-bbc0-a6ed0b630fa1","Type":"ContainerStarted","Data":"b36e19baeb233cf4e5cd87a27b5d84dab3710a5a6d59336f038635d7bcef562e"} Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.219327 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-lwc4r" event={"ID":"765bb4a4-7c41-414b-a9be-a54be49b76ff","Type":"ContainerStarted","Data":"07bc3a9140f6ab01c341d730484a891bf52fca94a8c3ce59f871d0f460ddfadb"} Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.220356 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mhpsl" 
event={"ID":"3a9d6180-dff8-46d9-92df-20ff4b1b466f","Type":"ContainerStarted","Data":"50947489b1d01ff705a6d9b72a5ea9860c8f98195796c2bc92327eed8dc1351f"} Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.220372 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mhpsl" event={"ID":"3a9d6180-dff8-46d9-92df-20ff4b1b466f","Type":"ContainerStarted","Data":"3592aece97e0c8fb77f822cf063b793165f097eca20445b2d8d8b328ae6340c4"} Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.221050 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mhpsl" Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.221891 5003 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-mhpsl container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.26:5443/healthz\": dial tcp 10.217.0.26:5443: connect: connection refused" start-of-body= Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.221921 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mhpsl" podUID="3a9d6180-dff8-46d9-92df-20ff4b1b466f" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.26:5443/healthz\": dial tcp 10.217.0.26:5443: connect: connection refused" Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.223087 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l8t2q" event={"ID":"c21f1e83-8b5a-4fee-b51e-3617d90b23f8","Type":"ContainerStarted","Data":"28e8433ac36ed91d4e0ba98cd067fd0f9fd634602e7c580706f6167444ec8508"} Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.223105 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l8t2q" event={"ID":"c21f1e83-8b5a-4fee-b51e-3617d90b23f8","Type":"ContainerStarted","Data":"271f42ee47b45ddf9c44774dd2cab243c6f2e2210f6c254935e34c931ad04c47"} Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.226223 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-wx2w9" event={"ID":"de472d67-6d24-44c5-becf-1cd20d390264","Type":"ContainerStarted","Data":"feab49f5aaf5da2f863ad1d470cef30751798a7dee824ff58607cf7e08838f51"} Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.297195 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-mvtxr" event={"ID":"d7c10f1a-b1ca-4c58-882a-f5d834b31b5a","Type":"ContainerStarted","Data":"083e70e6092ca2881d06aeae12002d2cdc174992524c03c1cfc81161705c521e"} Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.332844 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:34:35 crc kubenswrapper[5003]: E1206 15:34:35.332990 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-06 15:34:35.832959228 +0000 UTC m=+154.366313609 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.333434 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:35 crc kubenswrapper[5003]: E1206 15:34:35.337430 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:35.837416868 +0000 UTC m=+154.370771249 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.375921 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rqfnv" podStartSLOduration=129.37589684 podStartE2EDuration="2m9.37589684s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:35.374219584 +0000 UTC m=+153.907573985" watchObservedRunningTime="2025-12-06 15:34:35.37589684 +0000 UTC m=+153.909251221" Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.394468 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvkqz" event={"ID":"a48bd0ed-4703-46dd-9586-6141cfe7b15e","Type":"ContainerStarted","Data":"d28df3da8a5cbdc3448cdf299ef9dd8c81b040c7167d498e81fc1e00585363d7"} Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.412245 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2qwm4" event={"ID":"9c863b8e-b487-438d-a745-e3f41c2ef92c","Type":"ContainerStarted","Data":"427033c29a8715780078983fd7dd8f41860ab62d4872430062bcc37eb7cc6e4f"} Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.432135 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-t2c5q" podStartSLOduration=8.432114637 podStartE2EDuration="8.432114637s" podCreationTimestamp="2025-12-06 15:34:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-12-06 15:34:35.430224956 +0000 UTC m=+153.963579347" watchObservedRunningTime="2025-12-06 15:34:35.432114637 +0000 UTC m=+153.965469018" Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.432421 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-rrdrz" podStartSLOduration=129.432415925 podStartE2EDuration="2m9.432415925s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:35.41242538 +0000 UTC m=+153.945779761" watchObservedRunningTime="2025-12-06 15:34:35.432415925 +0000 UTC m=+153.965770306" Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.434377 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:34:35 crc kubenswrapper[5003]: E1206 15:34:35.435073 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:35.935057606 +0000 UTC m=+154.468411987 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.477889 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-x6k88" event={"ID":"8e450a8e-52f9-48fe-96c8-8f444a7437fe","Type":"ContainerStarted","Data":"bb87b987e64f1f29a184a3b105cd76b1e1cfccdc085d509c34fa84ea25d2ddbe"} Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.479861 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-fp6zg" event={"ID":"fb3e806c-c739-4628-bf9c-8745195fce4c","Type":"ContainerStarted","Data":"c9ec615c7079858d25126490834d802c074a1b0506bc1d923821988902ef4236"} Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.481672 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mhpsl" podStartSLOduration=129.481651225 podStartE2EDuration="2m9.481651225s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:35.480982407 +0000 UTC m=+154.014336808" watchObservedRunningTime="2025-12-06 15:34:35.481651225 +0000 UTC m=+154.015005606" Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.485068 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29417250-pmq24" 
event={"ID":"37f5fb4c-d8e0-421a-a921-a88e7a934b3a","Type":"ContainerStarted","Data":"6122352c223eac78345784271f6e5682821a5be0c2a9fa7e65e1179875d8bc20"} Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.515570 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l8t2q" podStartSLOduration=129.515546475 podStartE2EDuration="2m9.515546475s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:35.513050258 +0000 UTC m=+154.046404649" watchObservedRunningTime="2025-12-06 15:34:35.515546475 +0000 UTC m=+154.048900876" Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.521587 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.539273 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:35 crc kubenswrapper[5003]: E1206 15:34:35.539790 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:36.039773134 +0000 UTC m=+154.573127515 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.572289 5003 patch_prober.go:28] interesting pod/router-default-5444994796-4qjqw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 06 15:34:35 crc kubenswrapper[5003]: [-]has-synced failed: reason withheld Dec 06 15:34:35 crc kubenswrapper[5003]: [+]process-running ok Dec 06 15:34:35 crc kubenswrapper[5003]: healthz check failed Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.572346 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-4qjqw" podUID="b4d4786f-591a-43fb-afe1-04c8daa257a7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.624721 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pt2wd" podStartSLOduration=129.539773144 podStartE2EDuration="2m9.539773144s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:35.539570658 +0000 UTC m=+154.072925049" watchObservedRunningTime="2025-12-06 15:34:35.539773144 +0000 UTC m=+154.073127525" Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.643157 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:34:35 crc kubenswrapper[5003]: E1206 15:34:35.643390 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:36.143370412 +0000 UTC m=+154.676724793 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.646251 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:35 crc kubenswrapper[5003]: E1206 15:34:35.667008 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:36.166989575 +0000 UTC m=+154.700343956 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.750797 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:34:35 crc kubenswrapper[5003]: E1206 15:34:35.751203 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:36.251185153 +0000 UTC m=+154.784539524 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.852927 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:35 crc kubenswrapper[5003]: E1206 15:34:35.853593 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:36.353578598 +0000 UTC m=+154.886932979 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:35 crc kubenswrapper[5003]: I1206 15:34:35.962038 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:34:35 crc kubenswrapper[5003]: E1206 15:34:35.962411 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:36.462391866 +0000 UTC m=+154.995746247 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.063129 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:36 crc kubenswrapper[5003]: E1206 15:34:36.063483 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:36.563471166 +0000 UTC m=+155.096825547 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.258169 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:34:36 crc kubenswrapper[5003]: E1206 15:34:36.258732 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:36.758704181 +0000 UTC m=+155.292058562 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.258860 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:36 crc kubenswrapper[5003]: E1206 15:34:36.259218 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:36.759207945 +0000 UTC m=+155.292562326 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.362746 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:34:36 crc kubenswrapper[5003]: E1206 15:34:36.363133 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:36.863113981 +0000 UTC m=+155.396468362 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.464282 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:36 crc kubenswrapper[5003]: E1206 15:34:36.464681 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:36.964665414 +0000 UTC m=+155.498019795 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.490144 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-854zg" event={"ID":"dc7def17-767d-47b6-ad2f-4cf73e84ea5e","Type":"ContainerStarted","Data":"50d09ff2279ef6e5b534d2d47af6ca8d4955b98d615bd6b6e6d5a8915f5227c9"} Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.492329 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-fp6zg" event={"ID":"fb3e806c-c739-4628-bf9c-8745195fce4c","Type":"ContainerStarted","Data":"c0e229aae111ad5eb830cb730980aa34a8033fa56943711f0f68724778911db7"} Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.492376 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-fp6zg" event={"ID":"fb3e806c-c739-4628-bf9c-8745195fce4c","Type":"ContainerStarted","Data":"80307046ed6a1a4a69ed9ecf085fd6747b031ee292f6e859958547af197accc4"} Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.493235 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-lwc4r" event={"ID":"765bb4a4-7c41-414b-a9be-a54be49b76ff","Type":"ContainerStarted","Data":"8983f81f3e009e763099d5f2f8745d5e96adc95d452468981c722677ee3c4f9d"} Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.493730 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-lwc4r" Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.494756 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-bk7j8" 
event={"ID":"4e8d2519-5570-4ac9-8968-3015f3658ef0","Type":"ContainerStarted","Data":"8e1b270eeccd0edd102fa3a64005380ef77e486da296ba520cb163b054329e69"} Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.495826 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-p7hq2" event={"ID":"150d06be-be0f-4425-a584-760d19d009b5","Type":"ContainerStarted","Data":"fad8d1c28500aec73f16822338f2cfc72fa37c8a737d5eb30d66fadbc749bfb8"} Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.496701 5003 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-lwc4r container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.29:8080/healthz\": dial tcp 10.217.0.29:8080: connect: connection refused" start-of-body= Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.496764 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-lwc4r" podUID="765bb4a4-7c41-414b-a9be-a54be49b76ff" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.29:8080/healthz\": dial tcp 10.217.0.29:8080: connect: connection refused" Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.499409 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-wx2w9" event={"ID":"de472d67-6d24-44c5-becf-1cd20d390264","Type":"ContainerStarted","Data":"80b31c1c7c3dbd87e7de59bb1a7d558858470d44743e6dea8f3a1d93c4c4b055"} Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.499556 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-wx2w9" Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.501871 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-rjl56" event={"ID":"460fedd0-6ec4-4ef5-91ca-cc62ae21ebc4","Type":"ContainerStarted","Data":"1d710225e57fa8f202496d58c2ea3927bd52e7046b974af7a8e31ba514c7bddd"} Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.503264 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-xs4nd" event={"ID":"495babf4-9201-4523-8a21-44e001d4f4c1","Type":"ContainerStarted","Data":"a4b8673986284aa4d67bd73f9296f6275fb16837811e41a8c29c9a8ceefa38fe"} Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.503405 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-xs4nd" Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.505117 5003 patch_prober.go:28] interesting pod/downloads-7954f5f757-xs4nd container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.505162 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-xs4nd" podUID="495babf4-9201-4523-8a21-44e001d4f4c1" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.517763 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-854zg" podStartSLOduration=130.517745267 podStartE2EDuration="2m10.517745267s" 
podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:36.51595619 +0000 UTC m=+155.049310571" watchObservedRunningTime="2025-12-06 15:34:36.517745267 +0000 UTC m=+155.051099648" Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.521174 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-x6k88" event={"ID":"8e450a8e-52f9-48fe-96c8-8f444a7437fe","Type":"ContainerStarted","Data":"4247fc3794c2b2188e8311dfc8f7d45e0a0149fc0dacbab81116511f611c928f"} Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.523852 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7w4jb" event={"ID":"60490c57-c15a-4479-a735-257c6f60f1b0","Type":"ContainerStarted","Data":"ec4a578d25e71f41a7214926e57a686273f87431917ff29697c7db4ec2f24fa7"} Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.523897 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7w4jb" event={"ID":"60490c57-c15a-4479-a735-257c6f60f1b0","Type":"ContainerStarted","Data":"ddf84418a3d2d39740532e0311d2107dd9937764fcda0f1bd50f3022873efdc6"} Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.526282 5003 generic.go:334] "Generic (PLEG): container finished" podID="a48bd0ed-4703-46dd-9586-6141cfe7b15e" containerID="d28df3da8a5cbdc3448cdf299ef9dd8c81b040c7167d498e81fc1e00585363d7" exitCode=0 Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.526339 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvkqz" event={"ID":"a48bd0ed-4703-46dd-9586-6141cfe7b15e","Type":"ContainerDied","Data":"d28df3da8a5cbdc3448cdf299ef9dd8c81b040c7167d498e81fc1e00585363d7"} Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.526357 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvkqz" event={"ID":"a48bd0ed-4703-46dd-9586-6141cfe7b15e","Type":"ContainerStarted","Data":"7bb045990b28c658983c8b148b57d4a9c594c96789661a69bbe94d994447657a"} Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.526509 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvkqz" Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.527661 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29417250-pmq24" event={"ID":"37f5fb4c-d8e0-421a-a921-a88e7a934b3a","Type":"ContainerStarted","Data":"fc494c598e6b9f0ce7942c59b9123429642e62779f0b2cfc1d7e6441310a42a1"} Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.530393 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dc5hk" event={"ID":"0fca25a0-30bc-4906-8557-552531236ee4","Type":"ContainerStarted","Data":"0db51df9020e035a561a7f7125f605d4cfc5f059f0c8e5edae4b02f8b8cadc14"} Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.531042 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dc5hk" Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.532450 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vr8wv" event={"ID":"58dc4be2-81aa-4567-b800-1b77019a7eca","Type":"ContainerStarted","Data":"688965a8cc90b340b02c80be9b6965a40807022acb2a9187299b88b2f98c4e96"} Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.535726 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xvg94" event={"ID":"af89c7bf-ebbc-464e-b11a-c7343acbf887","Type":"ContainerStarted","Data":"118c2fcd065d60547811785b854e306c9496c38e5c8bdc163aeb9d421ab751e9"} Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.535762 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xvg94" event={"ID":"af89c7bf-ebbc-464e-b11a-c7343acbf887","Type":"ContainerStarted","Data":"1b3659a2fc07c19eab0193b5ff5c071ded0882b93559abc12f57561518ee156a"} Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.537376 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2qwm4" event={"ID":"9c863b8e-b487-438d-a745-e3f41c2ef92c","Type":"ContainerStarted","Data":"28a2471771af4ae2388d5ba12ad1d439207c4e0687caaf9d11d75eb383a1ec37"} Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.538738 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8fpj2" event={"ID":"e6995aa9-a5fd-4994-bbc0-a6ed0b630fa1","Type":"ContainerStarted","Data":"08c94a882aa0293157efa2f8aa3483f61df335ba2721acd45f8cac531f12bb2f"} Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.540385 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-gqgvg" event={"ID":"d32d51fd-e8e1-4bc7-a1b4-be5c851a8651","Type":"ContainerStarted","Data":"57518bde4404f7f7491e78462f047d064c6adaf52a6934e30012865f3e4a9552"} Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.540437 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-gqgvg" event={"ID":"d32d51fd-e8e1-4bc7-a1b4-be5c851a8651","Type":"ContainerStarted","Data":"5ef436074e44ee1c66a22ffd0847e2e5e386b6efc1e63c275d4e18d8e2b46f5c"} Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.540990 5003 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-mhpsl container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.26:5443/healthz\": dial tcp 10.217.0.26:5443: connect: connection refused" start-of-body= Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.541028 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mhpsl" podUID="3a9d6180-dff8-46d9-92df-20ff4b1b466f" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.26:5443/healthz\": dial tcp 10.217.0.26:5443: connect: connection refused" Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.542853 5003 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-rqfnv container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.24:8443/healthz\": dial tcp 10.217.0.24:8443: connect: connection refused" start-of-body= Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.542884 5003 prober.go:107] 
"Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rqfnv" podUID="e109ae1a-d737-4150-9e67-728d9d8d32dc" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.24:8443/healthz\": dial tcp 10.217.0.24:8443: connect: connection refused" Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.546754 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-xs4nd" podStartSLOduration=130.546736905 podStartE2EDuration="2m10.546736905s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:36.544725531 +0000 UTC m=+155.078079922" watchObservedRunningTime="2025-12-06 15:34:36.546736905 +0000 UTC m=+155.080091286" Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.555594 5003 patch_prober.go:28] interesting pod/router-default-5444994796-4qjqw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 06 15:34:36 crc kubenswrapper[5003]: [-]has-synced failed: reason withheld Dec 06 15:34:36 crc kubenswrapper[5003]: [+]process-running ok Dec 06 15:34:36 crc kubenswrapper[5003]: healthz check failed Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.555683 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-4qjqw" podUID="b4d4786f-591a-43fb-afe1-04c8daa257a7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.565389 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:34:36 crc kubenswrapper[5003]: E1206 15:34:36.565682 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:37.065667772 +0000 UTC m=+155.599022143 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.573160 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-wx2w9" podStartSLOduration=9.573145333 podStartE2EDuration="9.573145333s" podCreationTimestamp="2025-12-06 15:34:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:36.570853471 +0000 UTC m=+155.104207862" watchObservedRunningTime="2025-12-06 15:34:36.573145333 +0000 UTC m=+155.106499714" Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.620649 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pt2wd" Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.645417 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-p7hq2" podStartSLOduration=130.64539399 podStartE2EDuration="2m10.64539399s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:36.611360017 +0000 UTC m=+155.144714398" watchObservedRunningTime="2025-12-06 15:34:36.64539399 +0000 UTC m=+155.178748371" Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.667458 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:36 crc kubenswrapper[5003]: E1206 15:34:36.672447 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:37.172428715 +0000 UTC m=+155.705783196 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.678566 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-rjl56" podStartSLOduration=130.678545059 podStartE2EDuration="2m10.678545059s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:36.647045524 +0000 UTC m=+155.180399925" watchObservedRunningTime="2025-12-06 15:34:36.678545059 +0000 UTC m=+155.211899440" Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.679101 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-lwc4r" podStartSLOduration=130.679096354 podStartE2EDuration="2m10.679096354s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:36.677055559 +0000 UTC m=+155.210409960" watchObservedRunningTime="2025-12-06 15:34:36.679096354 +0000 UTC m=+155.212450735" Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.691853 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-fp6zg" podStartSLOduration=130.691832795 podStartE2EDuration="2m10.691832795s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:36.689901594 +0000 UTC m=+155.223255995" watchObservedRunningTime="2025-12-06 15:34:36.691832795 +0000 UTC m=+155.225187176" Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.719126 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2qwm4" podStartSLOduration=130.719107576 podStartE2EDuration="2m10.719107576s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:36.71887881 +0000 UTC m=+155.252233221" watchObservedRunningTime="2025-12-06 15:34:36.719107576 +0000 UTC m=+155.252461967" Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.754910 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xvg94" podStartSLOduration=130.754890816 podStartE2EDuration="2m10.754890816s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:36.753739495 +0000 UTC m=+155.287093906" watchObservedRunningTime="2025-12-06 15:34:36.754890816 +0000 UTC m=+155.288245197" Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.773839 5003 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:34:36 crc kubenswrapper[5003]: E1206 15:34:36.774181 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:37.274164333 +0000 UTC m=+155.807518714 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.795523 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7w4jb" podStartSLOduration=130.795502434 podStartE2EDuration="2m10.795502434s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:36.795059173 +0000 UTC m=+155.328413584" watchObservedRunningTime="2025-12-06 15:34:36.795502434 +0000 UTC m=+155.328856815" Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.870322 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-gqgvg" podStartSLOduration=130.87030077 podStartE2EDuration="2m10.87030077s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:36.823677871 +0000 UTC m=+155.357032272" watchObservedRunningTime="2025-12-06 15:34:36.87030077 +0000 UTC m=+155.403655161" Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.870776 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8fpj2" podStartSLOduration=130.870768813 podStartE2EDuration="2m10.870768813s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:36.869940561 +0000 UTC m=+155.403294962" watchObservedRunningTime="2025-12-06 15:34:36.870768813 +0000 UTC m=+155.404123194" Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.877733 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:36 crc kubenswrapper[5003]: E1206 15:34:36.878154 5003 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:37.378137091 +0000 UTC m=+155.911491462 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.943203 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29417250-pmq24" podStartSLOduration=131.943182964 podStartE2EDuration="2m11.943182964s" podCreationTimestamp="2025-12-06 15:32:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:36.939898887 +0000 UTC m=+155.473253278" watchObservedRunningTime="2025-12-06 15:34:36.943182964 +0000 UTC m=+155.476537355" Dec 06 15:34:36 crc kubenswrapper[5003]: I1206 15:34:36.979534 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:34:36 crc kubenswrapper[5003]: E1206 15:34:36.980031 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:37.480010712 +0000 UTC m=+156.013365103 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:37 crc kubenswrapper[5003]: I1206 15:34:37.008178 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-x6k88" podStartSLOduration=131.008157976 podStartE2EDuration="2m11.008157976s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:36.994861371 +0000 UTC m=+155.528215762" watchObservedRunningTime="2025-12-06 15:34:37.008157976 +0000 UTC m=+155.541512357" Dec 06 15:34:37 crc kubenswrapper[5003]: I1206 15:34:37.080532 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:37 crc kubenswrapper[5003]: E1206 15:34:37.080955 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:37.580939428 +0000 UTC m=+156.114293809 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:37 crc kubenswrapper[5003]: I1206 15:34:37.119228 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvkqz" podStartSLOduration=131.119208374 podStartE2EDuration="2m11.119208374s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:37.117713184 +0000 UTC m=+155.651067585" watchObservedRunningTime="2025-12-06 15:34:37.119208374 +0000 UTC m=+155.652562755" Dec 06 15:34:37 crc kubenswrapper[5003]: I1206 15:34:37.119719 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dc5hk" podStartSLOduration=131.119713218 podStartE2EDuration="2m11.119713218s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:37.072634705 +0000 UTC m=+155.605989106" watchObservedRunningTime="2025-12-06 15:34:37.119713218 +0000 UTC m=+155.653067599" Dec 06 15:34:37 crc kubenswrapper[5003]: I1206 15:34:37.149784 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vr8wv" podStartSLOduration=131.149764024 podStartE2EDuration="2m11.149764024s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:37.149651891 +0000 UTC m=+155.683006272" watchObservedRunningTime="2025-12-06 15:34:37.149764024 +0000 UTC m=+155.683118405" Dec 06 15:34:37 crc kubenswrapper[5003]: I1206 15:34:37.182052 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:34:37 crc kubenswrapper[5003]: E1206 15:34:37.182224 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:37.682198533 +0000 UTC m=+156.215552914 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:37 crc kubenswrapper[5003]: I1206 15:34:37.182277 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:37 crc kubenswrapper[5003]: E1206 15:34:37.182590 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:37.682582234 +0000 UTC m=+156.215936615 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:37 crc kubenswrapper[5003]: I1206 15:34:37.283400 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:34:37 crc kubenswrapper[5003]: E1206 15:34:37.283673 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:37.783655204 +0000 UTC m=+156.317009585 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:37 crc kubenswrapper[5003]: I1206 15:34:37.384843 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:37 crc kubenswrapper[5003]: E1206 15:34:37.385200 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:37.885184686 +0000 UTC m=+156.418539067 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:37 crc kubenswrapper[5003]: I1206 15:34:37.485985 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:34:37 crc kubenswrapper[5003]: E1206 15:34:37.486185 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:37.986154634 +0000 UTC m=+156.519509015 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:37 crc kubenswrapper[5003]: I1206 15:34:37.486305 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:37 crc kubenswrapper[5003]: E1206 15:34:37.486747 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:37.986735119 +0000 UTC m=+156.520089570 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:37 crc kubenswrapper[5003]: I1206 15:34:37.549115 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-bk7j8" event={"ID":"4e8d2519-5570-4ac9-8968-3015f3658ef0","Type":"ContainerStarted","Data":"fcee68386a2bc7e4ad8457b24ec1603263e0c1498474fec918104a35716dc27f"} Dec 06 15:34:37 crc kubenswrapper[5003]: I1206 15:34:37.549626 5003 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-lwc4r container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.29:8080/healthz\": dial tcp 10.217.0.29:8080: connect: connection refused" start-of-body= Dec 06 15:34:37 crc kubenswrapper[5003]: I1206 15:34:37.549657 5003 patch_prober.go:28] interesting pod/downloads-7954f5f757-xs4nd container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Dec 06 15:34:37 crc kubenswrapper[5003]: I1206 15:34:37.549736 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-lwc4r" podUID="765bb4a4-7c41-414b-a9be-a54be49b76ff" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.29:8080/healthz\": dial tcp 10.217.0.29:8080: connect: connection refused" Dec 06 15:34:37 crc kubenswrapper[5003]: I1206 15:34:37.549764 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-xs4nd" podUID="495babf4-9201-4523-8a21-44e001d4f4c1" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Dec 06 15:34:37 crc kubenswrapper[5003]: I1206 15:34:37.553814 
5003 patch_prober.go:28] interesting pod/router-default-5444994796-4qjqw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 06 15:34:37 crc kubenswrapper[5003]: [-]has-synced failed: reason withheld Dec 06 15:34:37 crc kubenswrapper[5003]: [+]process-running ok Dec 06 15:34:37 crc kubenswrapper[5003]: healthz check failed Dec 06 15:34:37 crc kubenswrapper[5003]: I1206 15:34:37.553862 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-4qjqw" podUID="b4d4786f-591a-43fb-afe1-04c8daa257a7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 06 15:34:37 crc kubenswrapper[5003]: I1206 15:34:37.565280 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rqfnv" Dec 06 15:34:37 crc kubenswrapper[5003]: I1206 15:34:37.587590 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:34:37 crc kubenswrapper[5003]: E1206 15:34:37.588653 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:38.088630682 +0000 UTC m=+156.621985063 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:37 crc kubenswrapper[5003]: I1206 15:34:37.690384 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:37 crc kubenswrapper[5003]: E1206 15:34:37.690833 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:38.190812471 +0000 UTC m=+156.724166852 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:37 crc kubenswrapper[5003]: I1206 15:34:37.768478 5003 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 06 15:34:37 crc kubenswrapper[5003]: I1206 15:34:37.792117 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:34:37 crc kubenswrapper[5003]: E1206 15:34:37.792307 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:38.292288752 +0000 UTC m=+156.825643133 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:37 crc kubenswrapper[5003]: I1206 15:34:37.792388 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:37 crc kubenswrapper[5003]: E1206 15:34:37.792754 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:38.292733564 +0000 UTC m=+156.826087945 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:37 crc kubenswrapper[5003]: I1206 15:34:37.893380 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:34:37 crc kubenswrapper[5003]: E1206 15:34:37.893580 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:38.393554227 +0000 UTC m=+156.926908608 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:37 crc kubenswrapper[5003]: I1206 15:34:37.893643 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:37 crc kubenswrapper[5003]: E1206 15:34:37.893943 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:38.393936638 +0000 UTC m=+156.927291009 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:37 crc kubenswrapper[5003]: I1206 15:34:37.995010 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:34:37 crc kubenswrapper[5003]: E1206 15:34:37.995428 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 15:34:38.495398399 +0000 UTC m=+157.028752790 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:37 crc kubenswrapper[5003]: I1206 15:34:37.995521 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:37 crc kubenswrapper[5003]: E1206 15:34:37.995897 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 15:34:38.495880991 +0000 UTC m=+157.029235452 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tvwvh" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 15:34:38 crc kubenswrapper[5003]: I1206 15:34:38.060975 5003 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-06T15:34:37.768537476Z","Handler":null,"Name":""} Dec 06 15:34:38 crc kubenswrapper[5003]: I1206 15:34:38.070069 5003 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 06 15:34:38 crc kubenswrapper[5003]: I1206 15:34:38.070129 5003 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 06 15:34:38 crc kubenswrapper[5003]: I1206 15:34:38.097127 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 15:34:38 crc kubenswrapper[5003]: I1206 15:34:38.115598 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 06 15:34:38 crc kubenswrapper[5003]: I1206 15:34:38.198172 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:34:38 crc kubenswrapper[5003]: I1206 15:34:38.208680 5003 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 06 15:34:38 crc kubenswrapper[5003]: I1206 15:34:38.208742 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh"
Dec 06 15:34:38 crc kubenswrapper[5003]: I1206 15:34:38.269276 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tvwvh\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh"
Dec 06 15:34:38 crc kubenswrapper[5003]: I1206 15:34:38.389190 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh"
Dec 06 15:34:38 crc kubenswrapper[5003]: I1206 15:34:38.458373 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-jx64p"
Dec 06 15:34:38 crc kubenswrapper[5003]: I1206 15:34:38.476094 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mhpsl"
Dec 06 15:34:38 crc kubenswrapper[5003]: I1206 15:34:38.482401 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-jx64p"
Dec 06 15:34:38 crc kubenswrapper[5003]: I1206 15:34:38.555022 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-bk7j8" event={"ID":"4e8d2519-5570-4ac9-8968-3015f3658ef0","Type":"ContainerStarted","Data":"9d341843a149cb2869ab570d9e75bbe4cffa0578ea6c43bd281676f2e940aaf9"}
Dec 06 15:34:38 crc kubenswrapper[5003]: I1206 15:34:38.555063 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-bk7j8" event={"ID":"4e8d2519-5570-4ac9-8968-3015f3658ef0","Type":"ContainerStarted","Data":"f08a0cc038a43d3984c6f406d9a80fca996b5be3bf4f66a0d31e8ebdc2da5b12"}
Dec 06 15:34:38 crc kubenswrapper[5003]: I1206 15:34:38.556660 5003 patch_prober.go:28] interesting pod/router-default-5444994796-4qjqw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 06 15:34:38 crc kubenswrapper[5003]: [-]has-synced failed: reason withheld
Dec 06 15:34:38 crc kubenswrapper[5003]: [+]process-running ok
Dec 06 15:34:38 crc kubenswrapper[5003]: healthz check failed
Dec 06 15:34:38 crc kubenswrapper[5003]: I1206 15:34:38.556694 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-4qjqw" podUID="b4d4786f-591a-43fb-afe1-04c8daa257a7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 06 15:34:38 crc kubenswrapper[5003]: I1206 15:34:38.566164 5003 generic.go:334] "Generic (PLEG): container finished" podID="37f5fb4c-d8e0-421a-a921-a88e7a934b3a" containerID="fc494c598e6b9f0ce7942c59b9123429642e62779f0b2cfc1d7e6441310a42a1" exitCode=0
Dec 06 15:34:38 crc kubenswrapper[5003]: I1206 15:34:38.566784 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29417250-pmq24" event={"ID":"37f5fb4c-d8e0-421a-a921-a88e7a934b3a","Type":"ContainerDied","Data":"fc494c598e6b9f0ce7942c59b9123429642e62779f0b2cfc1d7e6441310a42a1"}
Dec 06 15:34:38 crc kubenswrapper[5003]: I1206 15:34:38.976966 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-bk7j8" podStartSLOduration=11.976946377 podStartE2EDuration="11.976946377s" podCreationTimestamp="2025-12-06 15:34:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:38.674011605 +0000 UTC m=+157.207365986" watchObservedRunningTime="2025-12-06 15:34:38.976946377 +0000 UTC m=+157.510300758"
Dec 06 15:34:38 crc kubenswrapper[5003]: I1206 15:34:38.978943 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-jqxj7"]
Dec 06 15:34:38 crc kubenswrapper[5003]: I1206 15:34:38.979883 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jqxj7"
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.006015 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-tvwvh"]
Dec 06 15:34:39 crc kubenswrapper[5003]: W1206 15:34:39.006466 5003 reflector.go:561] object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g": failed to list *v1.Secret: secrets "certified-operators-dockercfg-4rs5g" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-marketplace": no relationship found between node 'crc' and this object
Dec 06 15:34:39 crc kubenswrapper[5003]: E1206 15:34:39.006508 5003 reflector.go:158] "Unhandled Error" err="object-\"openshift-marketplace\"/\"certified-operators-dockercfg-4rs5g\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"certified-operators-dockercfg-4rs5g\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-marketplace\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.074873 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zw5mx\" (UniqueName: \"kubernetes.io/projected/dde2226a-d12b-4c3b-a396-cf72781488ca-kube-api-access-zw5mx\") pod \"certified-operators-jqxj7\" (UID: \"dde2226a-d12b-4c3b-a396-cf72781488ca\") " pod="openshift-marketplace/certified-operators-jqxj7"
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.075028 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dde2226a-d12b-4c3b-a396-cf72781488ca-utilities\") pod \"certified-operators-jqxj7\" (UID: \"dde2226a-d12b-4c3b-a396-cf72781488ca\") " pod="openshift-marketplace/certified-operators-jqxj7"
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.075141 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dde2226a-d12b-4c3b-a396-cf72781488ca-catalog-content\") pod \"certified-operators-jqxj7\" (UID: \"dde2226a-d12b-4c3b-a396-cf72781488ca\") " pod="openshift-marketplace/certified-operators-jqxj7"
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.137966 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-gtwdh"]
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.139190 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gtwdh"
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.141340 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.155574 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gtwdh"]
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.164080 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jqxj7"]
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.176162 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dde2226a-d12b-4c3b-a396-cf72781488ca-utilities\") pod \"certified-operators-jqxj7\" (UID: \"dde2226a-d12b-4c3b-a396-cf72781488ca\") " pod="openshift-marketplace/certified-operators-jqxj7"
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.176224 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dde2226a-d12b-4c3b-a396-cf72781488ca-catalog-content\") pod \"certified-operators-jqxj7\" (UID: \"dde2226a-d12b-4c3b-a396-cf72781488ca\") " pod="openshift-marketplace/certified-operators-jqxj7"
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.176261 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c5a30dc-06ca-435f-81b9-576f03f05a19-catalog-content\") pod \"community-operators-gtwdh\" (UID: \"6c5a30dc-06ca-435f-81b9-576f03f05a19\") " pod="openshift-marketplace/community-operators-gtwdh"
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.176283 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d788v\" (UniqueName: \"kubernetes.io/projected/6c5a30dc-06ca-435f-81b9-576f03f05a19-kube-api-access-d788v\") pod \"community-operators-gtwdh\" (UID: \"6c5a30dc-06ca-435f-81b9-576f03f05a19\") " pod="openshift-marketplace/community-operators-gtwdh"
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.176316 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c5a30dc-06ca-435f-81b9-576f03f05a19-utilities\") pod \"community-operators-gtwdh\" (UID: \"6c5a30dc-06ca-435f-81b9-576f03f05a19\") " pod="openshift-marketplace/community-operators-gtwdh"
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.176353 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zw5mx\" (UniqueName: \"kubernetes.io/projected/dde2226a-d12b-4c3b-a396-cf72781488ca-kube-api-access-zw5mx\") pod \"certified-operators-jqxj7\" (UID: \"dde2226a-d12b-4c3b-a396-cf72781488ca\") " pod="openshift-marketplace/certified-operators-jqxj7"
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.177140 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dde2226a-d12b-4c3b-a396-cf72781488ca-utilities\") pod \"certified-operators-jqxj7\" (UID: \"dde2226a-d12b-4c3b-a396-cf72781488ca\") " pod="openshift-marketplace/certified-operators-jqxj7"
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.177418 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dde2226a-d12b-4c3b-a396-cf72781488ca-catalog-content\") pod \"certified-operators-jqxj7\" (UID: \"dde2226a-d12b-4c3b-a396-cf72781488ca\") " pod="openshift-marketplace/certified-operators-jqxj7"
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.218675 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zw5mx\" (UniqueName: \"kubernetes.io/projected/dde2226a-d12b-4c3b-a396-cf72781488ca-kube-api-access-zw5mx\") pod \"certified-operators-jqxj7\" (UID: \"dde2226a-d12b-4c3b-a396-cf72781488ca\") " pod="openshift-marketplace/certified-operators-jqxj7"
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.222016 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvkqz"
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.279277 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c5a30dc-06ca-435f-81b9-576f03f05a19-catalog-content\") pod \"community-operators-gtwdh\" (UID: \"6c5a30dc-06ca-435f-81b9-576f03f05a19\") " pod="openshift-marketplace/community-operators-gtwdh"
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.279339 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d788v\" (UniqueName: \"kubernetes.io/projected/6c5a30dc-06ca-435f-81b9-576f03f05a19-kube-api-access-d788v\") pod \"community-operators-gtwdh\" (UID: \"6c5a30dc-06ca-435f-81b9-576f03f05a19\") " pod="openshift-marketplace/community-operators-gtwdh"
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.279370 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c5a30dc-06ca-435f-81b9-576f03f05a19-utilities\") pod \"community-operators-gtwdh\" (UID: \"6c5a30dc-06ca-435f-81b9-576f03f05a19\") " pod="openshift-marketplace/community-operators-gtwdh"
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.281308 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c5a30dc-06ca-435f-81b9-576f03f05a19-catalog-content\") pod \"community-operators-gtwdh\" (UID: \"6c5a30dc-06ca-435f-81b9-576f03f05a19\") " pod="openshift-marketplace/community-operators-gtwdh"
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.281421 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c5a30dc-06ca-435f-81b9-576f03f05a19-utilities\") pod \"community-operators-gtwdh\" (UID: \"6c5a30dc-06ca-435f-81b9-576f03f05a19\") " pod="openshift-marketplace/community-operators-gtwdh"
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.405055 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-x4xqj"]
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.406746 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x4xqj"
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.409152 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-x4xqj"]
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.416951 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d788v\" (UniqueName: \"kubernetes.io/projected/6c5a30dc-06ca-435f-81b9-576f03f05a19-kube-api-access-d788v\") pod \"community-operators-gtwdh\" (UID: \"6c5a30dc-06ca-435f-81b9-576f03f05a19\") " pod="openshift-marketplace/community-operators-gtwdh"
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.510710 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gtwdh"
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.511830 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2scn\" (UniqueName: \"kubernetes.io/projected/78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4-kube-api-access-d2scn\") pod \"certified-operators-x4xqj\" (UID: \"78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4\") " pod="openshift-marketplace/certified-operators-x4xqj"
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.511906 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4-utilities\") pod \"certified-operators-x4xqj\" (UID: \"78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4\") " pod="openshift-marketplace/certified-operators-x4xqj"
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.511965 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4-catalog-content\") pod \"certified-operators-x4xqj\" (UID: \"78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4\") " pod="openshift-marketplace/certified-operators-x4xqj"
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.536431 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-x2bhv"]
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.537339 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-x2bhv"
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.557112 5003 patch_prober.go:28] interesting pod/router-default-5444994796-4qjqw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 06 15:34:39 crc kubenswrapper[5003]: [-]has-synced failed: reason withheld
Dec 06 15:34:39 crc kubenswrapper[5003]: [+]process-running ok
Dec 06 15:34:39 crc kubenswrapper[5003]: healthz check failed
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.557215 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-4qjqw" podUID="b4d4786f-591a-43fb-afe1-04c8daa257a7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.566167 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-x2bhv"]
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.583178 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" event={"ID":"5cb1719e-962f-436c-bbc0-cd048de8dd14","Type":"ContainerStarted","Data":"1c712938771ac6a053daa14ba2940540518c7ddb9054b917da1d98c13fed6044"}
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.583557 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" event={"ID":"5cb1719e-962f-436c-bbc0-cd048de8dd14","Type":"ContainerStarted","Data":"90494900ff5d14fe879dcc8bcbf8607bb3ec4c745033e0b46a897ac5286fe76c"}
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.584076 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh"
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.613084 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2scn\" (UniqueName: \"kubernetes.io/projected/78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4-kube-api-access-d2scn\") pod \"certified-operators-x4xqj\" (UID: \"78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4\") " pod="openshift-marketplace/certified-operators-x4xqj"
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.613349 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4-utilities\") pod \"certified-operators-x4xqj\" (UID: \"78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4\") " pod="openshift-marketplace/certified-operators-x4xqj"
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.613461 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4-catalog-content\") pod \"certified-operators-x4xqj\" (UID: \"78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4\") " pod="openshift-marketplace/certified-operators-x4xqj"
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.614021 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4-catalog-content\") pod \"certified-operators-x4xqj\" (UID: \"78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4\") " pod="openshift-marketplace/certified-operators-x4xqj"
Dec 06 15:34:39 crc
kubenswrapper[5003]: I1206 15:34:39.614309 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4-utilities\") pod \"certified-operators-x4xqj\" (UID: \"78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4\") " pod="openshift-marketplace/certified-operators-x4xqj" Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.621947 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" podStartSLOduration=133.621926171 podStartE2EDuration="2m13.621926171s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:39.62185714 +0000 UTC m=+158.155211521" watchObservedRunningTime="2025-12-06 15:34:39.621926171 +0000 UTC m=+158.155280552" Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.647396 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2scn\" (UniqueName: \"kubernetes.io/projected/78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4-kube-api-access-d2scn\") pod \"certified-operators-x4xqj\" (UID: \"78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4\") " pod="openshift-marketplace/certified-operators-x4xqj" Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.715394 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a73c8333-dbdd-447b-b940-5aca2f15d00d-utilities\") pod \"community-operators-x2bhv\" (UID: \"a73c8333-dbdd-447b-b940-5aca2f15d00d\") " pod="openshift-marketplace/community-operators-x2bhv" Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.715460 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tsk7n\" (UniqueName: \"kubernetes.io/projected/a73c8333-dbdd-447b-b940-5aca2f15d00d-kube-api-access-tsk7n\") pod \"community-operators-x2bhv\" (UID: \"a73c8333-dbdd-447b-b940-5aca2f15d00d\") " pod="openshift-marketplace/community-operators-x2bhv" Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.715557 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a73c8333-dbdd-447b-b940-5aca2f15d00d-catalog-content\") pod \"community-operators-x2bhv\" (UID: \"a73c8333-dbdd-447b-b940-5aca2f15d00d\") " pod="openshift-marketplace/community-operators-x2bhv" Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.720073 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.817800 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a73c8333-dbdd-447b-b940-5aca2f15d00d-utilities\") pod \"community-operators-x2bhv\" (UID: \"a73c8333-dbdd-447b-b940-5aca2f15d00d\") " pod="openshift-marketplace/community-operators-x2bhv" Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.817876 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tsk7n\" (UniqueName: \"kubernetes.io/projected/a73c8333-dbdd-447b-b940-5aca2f15d00d-kube-api-access-tsk7n\") pod \"community-operators-x2bhv\" (UID: 
\"a73c8333-dbdd-447b-b940-5aca2f15d00d\") " pod="openshift-marketplace/community-operators-x2bhv" Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.817943 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a73c8333-dbdd-447b-b940-5aca2f15d00d-catalog-content\") pod \"community-operators-x2bhv\" (UID: \"a73c8333-dbdd-447b-b940-5aca2f15d00d\") " pod="openshift-marketplace/community-operators-x2bhv" Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.818505 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a73c8333-dbdd-447b-b940-5aca2f15d00d-catalog-content\") pod \"community-operators-x2bhv\" (UID: \"a73c8333-dbdd-447b-b940-5aca2f15d00d\") " pod="openshift-marketplace/community-operators-x2bhv" Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.818681 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a73c8333-dbdd-447b-b940-5aca2f15d00d-utilities\") pod \"community-operators-x2bhv\" (UID: \"a73c8333-dbdd-447b-b940-5aca2f15d00d\") " pod="openshift-marketplace/community-operators-x2bhv" Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.836213 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tsk7n\" (UniqueName: \"kubernetes.io/projected/a73c8333-dbdd-447b-b940-5aca2f15d00d-kube-api-access-tsk7n\") pod \"community-operators-x2bhv\" (UID: \"a73c8333-dbdd-447b-b940-5aca2f15d00d\") " pod="openshift-marketplace/community-operators-x2bhv" Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.851341 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29417250-pmq24" Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.855650 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gtwdh"] Dec 06 15:34:39 crc kubenswrapper[5003]: W1206 15:34:39.866948 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6c5a30dc_06ca_435f_81b9_576f03f05a19.slice/crio-255332d1f913a88a8becd98fa9b2ebfdbcea6f545d4f2fc8f48a17439bd39f63 WatchSource:0}: Error finding container 255332d1f913a88a8becd98fa9b2ebfdbcea6f545d4f2fc8f48a17439bd39f63: Status 404 returned error can't find the container with id 255332d1f913a88a8becd98fa9b2ebfdbcea6f545d4f2fc8f48a17439bd39f63 Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.876534 5003 util.go:30] "No sandbox for pod can be found. 
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.971574 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-npbcn"
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.971853 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-npbcn"
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.983381 5003 patch_prober.go:28] interesting pod/console-f9d7485db-npbcn container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.13:8443/health\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body=
Dec 06 15:34:39 crc kubenswrapper[5003]: I1206 15:34:39.983455 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-npbcn" podUID="cee02b0a-ce4b-452e-aa00-48c7823c13d0" containerName="console" probeResult="failure" output="Get \"https://10.217.0.13:8443/health\": dial tcp 10.217.0.13:8443: connect: connection refused"
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.018538 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.019779 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/37f5fb4c-d8e0-421a-a921-a88e7a934b3a-config-volume\") pod \"37f5fb4c-d8e0-421a-a921-a88e7a934b3a\" (UID: \"37f5fb4c-d8e0-421a-a921-a88e7a934b3a\") "
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.019920 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/37f5fb4c-d8e0-421a-a921-a88e7a934b3a-secret-volume\") pod \"37f5fb4c-d8e0-421a-a921-a88e7a934b3a\" (UID: \"37f5fb4c-d8e0-421a-a921-a88e7a934b3a\") "
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.019972 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmczj\" (UniqueName: \"kubernetes.io/projected/37f5fb4c-d8e0-421a-a921-a88e7a934b3a-kube-api-access-nmczj\") pod \"37f5fb4c-d8e0-421a-a921-a88e7a934b3a\" (UID: \"37f5fb4c-d8e0-421a-a921-a88e7a934b3a\") "
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.020654 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37f5fb4c-d8e0-421a-a921-a88e7a934b3a-config-volume" (OuterVolumeSpecName: "config-volume") pod "37f5fb4c-d8e0-421a-a921-a88e7a934b3a" (UID: "37f5fb4c-d8e0-421a-a921-a88e7a934b3a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.025465 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jqxj7"
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.026567 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37f5fb4c-d8e0-421a-a921-a88e7a934b3a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "37f5fb4c-d8e0-421a-a921-a88e7a934b3a" (UID: "37f5fb4c-d8e0-421a-a921-a88e7a934b3a"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.027422 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x4xqj"
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.028028 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37f5fb4c-d8e0-421a-a921-a88e7a934b3a-kube-api-access-nmczj" (OuterVolumeSpecName: "kube-api-access-nmczj") pod "37f5fb4c-d8e0-421a-a921-a88e7a934b3a" (UID: "37f5fb4c-d8e0-421a-a921-a88e7a934b3a"). InnerVolumeSpecName "kube-api-access-nmczj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.122840 5003 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/37f5fb4c-d8e0-421a-a921-a88e7a934b3a-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.122873 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmczj\" (UniqueName: \"kubernetes.io/projected/37f5fb4c-d8e0-421a-a921-a88e7a934b3a-kube-api-access-nmczj\") on node \"crc\" DevicePath \"\""
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.122882 5003 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/37f5fb4c-d8e0-421a-a921-a88e7a934b3a-config-volume\") on node \"crc\" DevicePath \"\""
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.129794 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-x2bhv"]
Dec 06 15:34:40 crc kubenswrapper[5003]: W1206 15:34:40.161821 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda73c8333_dbdd_447b_b940_5aca2f15d00d.slice/crio-5def1bc74296a7755fffd1e04535358f85fcf146a68c9e762a4a1aa1ad5b4bc4 WatchSource:0}: Error finding container 5def1bc74296a7755fffd1e04535358f85fcf146a68c9e762a4a1aa1ad5b4bc4: Status 404 returned error can't find the container with id 5def1bc74296a7755fffd1e04535358f85fcf146a68c9e762a4a1aa1ad5b4bc4
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.279091 5003 patch_prober.go:28] interesting pod/downloads-7954f5f757-xs4nd container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body=
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.279147 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-xs4nd" podUID="495babf4-9201-4523-8a21-44e001d4f4c1" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused"
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.279099 5003 patch_prober.go:28] interesting pod/downloads-7954f5f757-xs4nd container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body=
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.279239 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-xs4nd" podUID="495babf4-9201-4523-8a21-44e001d4f4c1" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused"
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.329212 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jqxj7"]
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.419259 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-x4xqj"]
Dec 06 15:34:40 crc kubenswrapper[5003]: W1206 15:34:40.455588 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod78c47ecf_a4ad_4ac0_85f9_66fa3c8651b4.slice/crio-567130f65a9cacd76dbcdafef3fdbe66ae7108e28795c8f4d63cef1665b499e8 WatchSource:0}: Error finding container 567130f65a9cacd76dbcdafef3fdbe66ae7108e28795c8f4d63cef1665b499e8: Status 404 returned error can't find the container with id 567130f65a9cacd76dbcdafef3fdbe66ae7108e28795c8f4d63cef1665b499e8
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.547805 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-4qjqw"
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.551748 5003 patch_prober.go:28] interesting pod/router-default-5444994796-4qjqw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 06 15:34:40 crc kubenswrapper[5003]: [-]has-synced failed: reason withheld
Dec 06 15:34:40 crc kubenswrapper[5003]: [+]process-running ok
Dec 06 15:34:40 crc kubenswrapper[5003]: healthz check failed
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.552082 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-4qjqw" podUID="b4d4786f-591a-43fb-afe1-04c8daa257a7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.592168 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x4xqj" event={"ID":"78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4","Type":"ContainerStarted","Data":"567130f65a9cacd76dbcdafef3fdbe66ae7108e28795c8f4d63cef1665b499e8"}
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.603805 5003 generic.go:334] "Generic (PLEG): container finished" podID="6c5a30dc-06ca-435f-81b9-576f03f05a19" containerID="9216affd75358bb2a6ff2e3bc42ff778f149f28f1117596035ccf06ebe995dbf" exitCode=0
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.604350 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gtwdh" event={"ID":"6c5a30dc-06ca-435f-81b9-576f03f05a19","Type":"ContainerDied","Data":"9216affd75358bb2a6ff2e3bc42ff778f149f28f1117596035ccf06ebe995dbf"}
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.604404 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gtwdh" event={"ID":"6c5a30dc-06ca-435f-81b9-576f03f05a19","Type":"ContainerStarted","Data":"255332d1f913a88a8becd98fa9b2ebfdbcea6f545d4f2fc8f48a17439bd39f63"}
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.605985 5003 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.608727 5003 generic.go:334] "Generic (PLEG): container finished" podID="a73c8333-dbdd-447b-b940-5aca2f15d00d" containerID="981a6630aeed8abcfc1d9811f705234870ede08d7b9cc9e59219e3d8c339eacf" exitCode=0
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.609108 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x2bhv" event={"ID":"a73c8333-dbdd-447b-b940-5aca2f15d00d","Type":"ContainerDied","Data":"981a6630aeed8abcfc1d9811f705234870ede08d7b9cc9e59219e3d8c339eacf"}
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.609148 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x2bhv" event={"ID":"a73c8333-dbdd-447b-b940-5aca2f15d00d","Type":"ContainerStarted","Data":"5def1bc74296a7755fffd1e04535358f85fcf146a68c9e762a4a1aa1ad5b4bc4"}
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.615375 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jqxj7" event={"ID":"dde2226a-d12b-4c3b-a396-cf72781488ca","Type":"ContainerStarted","Data":"458cfd98d616a9e7de2f58e0c0d6dfa5b09f40963ae8eb61770509f66f95f37f"}
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.615431 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jqxj7" event={"ID":"dde2226a-d12b-4c3b-a396-cf72781488ca","Type":"ContainerStarted","Data":"1c87dfd637c4e8f0c657ef5290730c1845ce9b786c88f23f299ca18bfd7552a2"}
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.623114 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 06 15:34:40 crc kubenswrapper[5003]: E1206 15:34:40.623371 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37f5fb4c-d8e0-421a-a921-a88e7a934b3a" containerName="collect-profiles"
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.623393 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="37f5fb4c-d8e0-421a-a921-a88e7a934b3a" containerName="collect-profiles"
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.623560 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="37f5fb4c-d8e0-421a-a921-a88e7a934b3a" containerName="collect-profiles"
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.624035 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.625860 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29417250-pmq24"
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.626700 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29417250-pmq24" event={"ID":"37f5fb4c-d8e0-421a-a921-a88e7a934b3a","Type":"ContainerDied","Data":"6122352c223eac78345784271f6e5682821a5be0c2a9fa7e65e1179875d8bc20"}
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.626744 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6122352c223eac78345784271f6e5682821a5be0c2a9fa7e65e1179875d8bc20"
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.627096 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt"
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.627304 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n"
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.637999 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.645322 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-lwc4r"
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.740715 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/de788a8f-3932-4514-8bc4-c808ef57661e-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"de788a8f-3932-4514-8bc4-c808ef57661e\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.740785 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/de788a8f-3932-4514-8bc4-c808ef57661e-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"de788a8f-3932-4514-8bc4-c808ef57661e\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.841958 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/de788a8f-3932-4514-8bc4-c808ef57661e-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"de788a8f-3932-4514-8bc4-c808ef57661e\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.842000 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/de788a8f-3932-4514-8bc4-c808ef57661e-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"de788a8f-3932-4514-8bc4-c808ef57661e\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.842066 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/de788a8f-3932-4514-8bc4-c808ef57661e-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"de788a8f-3932-4514-8bc4-c808ef57661e\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.862755 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/de788a8f-3932-4514-8bc4-c808ef57661e-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"de788a8f-3932-4514-8bc4-c808ef57661e\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.921298 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-b5rnp"]
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.922642 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b5rnp"
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.924818 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Dec 06 15:34:40 crc kubenswrapper[5003]: I1206 15:34:40.933062 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-b5rnp"]
Dec 06 15:34:41 crc kubenswrapper[5003]: I1206 15:34:41.043854 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w69jz\" (UniqueName: \"kubernetes.io/projected/67506930-842d-411a-b032-26874042995d-kube-api-access-w69jz\") pod \"redhat-marketplace-b5rnp\" (UID: \"67506930-842d-411a-b032-26874042995d\") " pod="openshift-marketplace/redhat-marketplace-b5rnp"
Dec 06 15:34:41 crc kubenswrapper[5003]: I1206 15:34:41.045236 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 06 15:34:41 crc kubenswrapper[5003]: I1206 15:34:41.045318 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67506930-842d-411a-b032-26874042995d-catalog-content\") pod \"redhat-marketplace-b5rnp\" (UID: \"67506930-842d-411a-b032-26874042995d\") " pod="openshift-marketplace/redhat-marketplace-b5rnp"
Dec 06 15:34:41 crc kubenswrapper[5003]: I1206 15:34:41.045554 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67506930-842d-411a-b032-26874042995d-utilities\") pod \"redhat-marketplace-b5rnp\" (UID: \"67506930-842d-411a-b032-26874042995d\") " pod="openshift-marketplace/redhat-marketplace-b5rnp"
Dec 06 15:34:41 crc kubenswrapper[5003]: I1206 15:34:41.147457 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w69jz\" (UniqueName: \"kubernetes.io/projected/67506930-842d-411a-b032-26874042995d-kube-api-access-w69jz\") pod \"redhat-marketplace-b5rnp\" (UID: \"67506930-842d-411a-b032-26874042995d\") " pod="openshift-marketplace/redhat-marketplace-b5rnp"
Dec 06 15:34:41 crc kubenswrapper[5003]: I1206 15:34:41.147555 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67506930-842d-411a-b032-26874042995d-catalog-content\") pod \"redhat-marketplace-b5rnp\" (UID: \"67506930-842d-411a-b032-26874042995d\") " pod="openshift-marketplace/redhat-marketplace-b5rnp"
Dec 06 15:34:41 crc kubenswrapper[5003]: I1206 15:34:41.147622 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67506930-842d-411a-b032-26874042995d-utilities\") pod \"redhat-marketplace-b5rnp\" (UID: \"67506930-842d-411a-b032-26874042995d\") " pod="openshift-marketplace/redhat-marketplace-b5rnp"
\"67506930-842d-411a-b032-26874042995d\") " pod="openshift-marketplace/redhat-marketplace-b5rnp" Dec 06 15:34:41 crc kubenswrapper[5003]: I1206 15:34:41.148256 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67506930-842d-411a-b032-26874042995d-utilities\") pod \"redhat-marketplace-b5rnp\" (UID: \"67506930-842d-411a-b032-26874042995d\") " pod="openshift-marketplace/redhat-marketplace-b5rnp" Dec 06 15:34:41 crc kubenswrapper[5003]: I1206 15:34:41.148329 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67506930-842d-411a-b032-26874042995d-catalog-content\") pod \"redhat-marketplace-b5rnp\" (UID: \"67506930-842d-411a-b032-26874042995d\") " pod="openshift-marketplace/redhat-marketplace-b5rnp" Dec 06 15:34:41 crc kubenswrapper[5003]: I1206 15:34:41.181753 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w69jz\" (UniqueName: \"kubernetes.io/projected/67506930-842d-411a-b032-26874042995d-kube-api-access-w69jz\") pod \"redhat-marketplace-b5rnp\" (UID: \"67506930-842d-411a-b032-26874042995d\") " pod="openshift-marketplace/redhat-marketplace-b5rnp" Dec 06 15:34:41 crc kubenswrapper[5003]: I1206 15:34:41.237392 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b5rnp" Dec 06 15:34:41 crc kubenswrapper[5003]: I1206 15:34:41.328915 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-pzkrw"] Dec 06 15:34:41 crc kubenswrapper[5003]: I1206 15:34:41.330185 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pzkrw" Dec 06 15:34:41 crc kubenswrapper[5003]: I1206 15:34:41.339380 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pzkrw"] Dec 06 15:34:41 crc kubenswrapper[5003]: I1206 15:34:41.423238 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 06 15:34:41 crc kubenswrapper[5003]: I1206 15:34:41.451582 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7b85911-8f34-416f-b3f9-2776b2aa7876-utilities\") pod \"redhat-marketplace-pzkrw\" (UID: \"b7b85911-8f34-416f-b3f9-2776b2aa7876\") " pod="openshift-marketplace/redhat-marketplace-pzkrw" Dec 06 15:34:41 crc kubenswrapper[5003]: I1206 15:34:41.451639 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mq5bv\" (UniqueName: \"kubernetes.io/projected/b7b85911-8f34-416f-b3f9-2776b2aa7876-kube-api-access-mq5bv\") pod \"redhat-marketplace-pzkrw\" (UID: \"b7b85911-8f34-416f-b3f9-2776b2aa7876\") " pod="openshift-marketplace/redhat-marketplace-pzkrw" Dec 06 15:34:41 crc kubenswrapper[5003]: I1206 15:34:41.451709 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7b85911-8f34-416f-b3f9-2776b2aa7876-catalog-content\") pod \"redhat-marketplace-pzkrw\" (UID: \"b7b85911-8f34-416f-b3f9-2776b2aa7876\") " pod="openshift-marketplace/redhat-marketplace-pzkrw" Dec 06 15:34:41 crc kubenswrapper[5003]: I1206 15:34:41.550543 5003 patch_prober.go:28] interesting pod/router-default-5444994796-4qjqw 
container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 06 15:34:41 crc kubenswrapper[5003]: [-]has-synced failed: reason withheld Dec 06 15:34:41 crc kubenswrapper[5003]: [+]process-running ok Dec 06 15:34:41 crc kubenswrapper[5003]: healthz check failed Dec 06 15:34:41 crc kubenswrapper[5003]: I1206 15:34:41.550645 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-4qjqw" podUID="b4d4786f-591a-43fb-afe1-04c8daa257a7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 06 15:34:41 crc kubenswrapper[5003]: I1206 15:34:41.553129 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7b85911-8f34-416f-b3f9-2776b2aa7876-catalog-content\") pod \"redhat-marketplace-pzkrw\" (UID: \"b7b85911-8f34-416f-b3f9-2776b2aa7876\") " pod="openshift-marketplace/redhat-marketplace-pzkrw" Dec 06 15:34:41 crc kubenswrapper[5003]: I1206 15:34:41.553280 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7b85911-8f34-416f-b3f9-2776b2aa7876-utilities\") pod \"redhat-marketplace-pzkrw\" (UID: \"b7b85911-8f34-416f-b3f9-2776b2aa7876\") " pod="openshift-marketplace/redhat-marketplace-pzkrw" Dec 06 15:34:41 crc kubenswrapper[5003]: I1206 15:34:41.553372 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mq5bv\" (UniqueName: \"kubernetes.io/projected/b7b85911-8f34-416f-b3f9-2776b2aa7876-kube-api-access-mq5bv\") pod \"redhat-marketplace-pzkrw\" (UID: \"b7b85911-8f34-416f-b3f9-2776b2aa7876\") " pod="openshift-marketplace/redhat-marketplace-pzkrw" Dec 06 15:34:41 crc kubenswrapper[5003]: I1206 15:34:41.554116 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7b85911-8f34-416f-b3f9-2776b2aa7876-utilities\") pod \"redhat-marketplace-pzkrw\" (UID: \"b7b85911-8f34-416f-b3f9-2776b2aa7876\") " pod="openshift-marketplace/redhat-marketplace-pzkrw" Dec 06 15:34:41 crc kubenswrapper[5003]: I1206 15:34:41.554251 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7b85911-8f34-416f-b3f9-2776b2aa7876-catalog-content\") pod \"redhat-marketplace-pzkrw\" (UID: \"b7b85911-8f34-416f-b3f9-2776b2aa7876\") " pod="openshift-marketplace/redhat-marketplace-pzkrw" Dec 06 15:34:41 crc kubenswrapper[5003]: I1206 15:34:41.571119 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mq5bv\" (UniqueName: \"kubernetes.io/projected/b7b85911-8f34-416f-b3f9-2776b2aa7876-kube-api-access-mq5bv\") pod \"redhat-marketplace-pzkrw\" (UID: \"b7b85911-8f34-416f-b3f9-2776b2aa7876\") " pod="openshift-marketplace/redhat-marketplace-pzkrw" Dec 06 15:34:41 crc kubenswrapper[5003]: I1206 15:34:41.659104 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pzkrw" Dec 06 15:34:42 crc kubenswrapper[5003]: I1206 15:34:42.035803 5003 generic.go:334] "Generic (PLEG): container finished" podID="dde2226a-d12b-4c3b-a396-cf72781488ca" containerID="458cfd98d616a9e7de2f58e0c0d6dfa5b09f40963ae8eb61770509f66f95f37f" exitCode=0 Dec 06 15:34:42 crc kubenswrapper[5003]: I1206 15:34:42.035881 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jqxj7" event={"ID":"dde2226a-d12b-4c3b-a396-cf72781488ca","Type":"ContainerDied","Data":"458cfd98d616a9e7de2f58e0c0d6dfa5b09f40963ae8eb61770509f66f95f37f"} Dec 06 15:34:42 crc kubenswrapper[5003]: I1206 15:34:42.042612 5003 generic.go:334] "Generic (PLEG): container finished" podID="78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4" containerID="b9995232ce54c673c8a974886149cdf14f22fe73fa727a0a2b165c84d7b2c2ac" exitCode=0 Dec 06 15:34:42 crc kubenswrapper[5003]: I1206 15:34:42.042735 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x4xqj" event={"ID":"78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4","Type":"ContainerDied","Data":"b9995232ce54c673c8a974886149cdf14f22fe73fa727a0a2b165c84d7b2c2ac"} Dec 06 15:34:42 crc kubenswrapper[5003]: I1206 15:34:42.134830 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mbw9t"] Dec 06 15:34:42 crc kubenswrapper[5003]: I1206 15:34:42.137401 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mbw9t" Dec 06 15:34:42 crc kubenswrapper[5003]: I1206 15:34:42.143139 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mbw9t"] Dec 06 15:34:42 crc kubenswrapper[5003]: I1206 15:34:42.143758 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 06 15:34:42 crc kubenswrapper[5003]: I1206 15:34:42.264370 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7370c672-28ad-4228-8285-c113c6675ba8-catalog-content\") pod \"redhat-operators-mbw9t\" (UID: \"7370c672-28ad-4228-8285-c113c6675ba8\") " pod="openshift-marketplace/redhat-operators-mbw9t" Dec 06 15:34:42 crc kubenswrapper[5003]: I1206 15:34:42.264453 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7370c672-28ad-4228-8285-c113c6675ba8-utilities\") pod \"redhat-operators-mbw9t\" (UID: \"7370c672-28ad-4228-8285-c113c6675ba8\") " pod="openshift-marketplace/redhat-operators-mbw9t" Dec 06 15:34:42 crc kubenswrapper[5003]: I1206 15:34:42.264548 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbncs\" (UniqueName: \"kubernetes.io/projected/7370c672-28ad-4228-8285-c113c6675ba8-kube-api-access-qbncs\") pod \"redhat-operators-mbw9t\" (UID: \"7370c672-28ad-4228-8285-c113c6675ba8\") " pod="openshift-marketplace/redhat-operators-mbw9t" Dec 06 15:34:42 crc kubenswrapper[5003]: I1206 15:34:42.365731 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7370c672-28ad-4228-8285-c113c6675ba8-catalog-content\") pod \"redhat-operators-mbw9t\" (UID: \"7370c672-28ad-4228-8285-c113c6675ba8\") " 
pod="openshift-marketplace/redhat-operators-mbw9t" Dec 06 15:34:42 crc kubenswrapper[5003]: I1206 15:34:42.366020 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7370c672-28ad-4228-8285-c113c6675ba8-utilities\") pod \"redhat-operators-mbw9t\" (UID: \"7370c672-28ad-4228-8285-c113c6675ba8\") " pod="openshift-marketplace/redhat-operators-mbw9t" Dec 06 15:34:42 crc kubenswrapper[5003]: I1206 15:34:42.366046 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbncs\" (UniqueName: \"kubernetes.io/projected/7370c672-28ad-4228-8285-c113c6675ba8-kube-api-access-qbncs\") pod \"redhat-operators-mbw9t\" (UID: \"7370c672-28ad-4228-8285-c113c6675ba8\") " pod="openshift-marketplace/redhat-operators-mbw9t" Dec 06 15:34:42 crc kubenswrapper[5003]: I1206 15:34:42.366328 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7370c672-28ad-4228-8285-c113c6675ba8-catalog-content\") pod \"redhat-operators-mbw9t\" (UID: \"7370c672-28ad-4228-8285-c113c6675ba8\") " pod="openshift-marketplace/redhat-operators-mbw9t" Dec 06 15:34:42 crc kubenswrapper[5003]: I1206 15:34:42.366648 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7370c672-28ad-4228-8285-c113c6675ba8-utilities\") pod \"redhat-operators-mbw9t\" (UID: \"7370c672-28ad-4228-8285-c113c6675ba8\") " pod="openshift-marketplace/redhat-operators-mbw9t" Dec 06 15:34:42 crc kubenswrapper[5003]: I1206 15:34:42.397070 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbncs\" (UniqueName: \"kubernetes.io/projected/7370c672-28ad-4228-8285-c113c6675ba8-kube-api-access-qbncs\") pod \"redhat-operators-mbw9t\" (UID: \"7370c672-28ad-4228-8285-c113c6675ba8\") " pod="openshift-marketplace/redhat-operators-mbw9t" Dec 06 15:34:42 crc kubenswrapper[5003]: I1206 15:34:42.481362 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mbw9t" Dec 06 15:34:42 crc kubenswrapper[5003]: I1206 15:34:42.541625 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-gwpxn"] Dec 06 15:34:42 crc kubenswrapper[5003]: I1206 15:34:42.543015 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gwpxn" Dec 06 15:34:42 crc kubenswrapper[5003]: I1206 15:34:42.550010 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gwpxn"] Dec 06 15:34:42 crc kubenswrapper[5003]: I1206 15:34:42.573550 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-4qjqw" Dec 06 15:34:42 crc kubenswrapper[5003]: I1206 15:34:42.576431 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-4qjqw" Dec 06 15:34:42 crc kubenswrapper[5003]: I1206 15:34:42.671652 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-746fx\" (UniqueName: \"kubernetes.io/projected/158c5179-aee8-4e5b-8a39-38d19808b3fd-kube-api-access-746fx\") pod \"redhat-operators-gwpxn\" (UID: \"158c5179-aee8-4e5b-8a39-38d19808b3fd\") " pod="openshift-marketplace/redhat-operators-gwpxn" Dec 06 15:34:42 crc kubenswrapper[5003]: I1206 15:34:42.671693 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/158c5179-aee8-4e5b-8a39-38d19808b3fd-catalog-content\") pod \"redhat-operators-gwpxn\" (UID: \"158c5179-aee8-4e5b-8a39-38d19808b3fd\") " pod="openshift-marketplace/redhat-operators-gwpxn" Dec 06 15:34:42 crc kubenswrapper[5003]: I1206 15:34:42.671727 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/158c5179-aee8-4e5b-8a39-38d19808b3fd-utilities\") pod \"redhat-operators-gwpxn\" (UID: \"158c5179-aee8-4e5b-8a39-38d19808b3fd\") " pod="openshift-marketplace/redhat-operators-gwpxn" Dec 06 15:34:42 crc kubenswrapper[5003]: I1206 15:34:42.738230 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pzkrw"] Dec 06 15:34:42 crc kubenswrapper[5003]: I1206 15:34:42.772560 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/158c5179-aee8-4e5b-8a39-38d19808b3fd-catalog-content\") pod \"redhat-operators-gwpxn\" (UID: \"158c5179-aee8-4e5b-8a39-38d19808b3fd\") " pod="openshift-marketplace/redhat-operators-gwpxn" Dec 06 15:34:42 crc kubenswrapper[5003]: I1206 15:34:42.772611 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/158c5179-aee8-4e5b-8a39-38d19808b3fd-utilities\") pod \"redhat-operators-gwpxn\" (UID: \"158c5179-aee8-4e5b-8a39-38d19808b3fd\") " pod="openshift-marketplace/redhat-operators-gwpxn" Dec 06 15:34:42 crc kubenswrapper[5003]: I1206 15:34:42.772694 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-746fx\" (UniqueName: \"kubernetes.io/projected/158c5179-aee8-4e5b-8a39-38d19808b3fd-kube-api-access-746fx\") pod \"redhat-operators-gwpxn\" (UID: \"158c5179-aee8-4e5b-8a39-38d19808b3fd\") " pod="openshift-marketplace/redhat-operators-gwpxn" Dec 06 15:34:42 crc kubenswrapper[5003]: I1206 15:34:42.773232 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/158c5179-aee8-4e5b-8a39-38d19808b3fd-catalog-content\") pod \"redhat-operators-gwpxn\" (UID: \"158c5179-aee8-4e5b-8a39-38d19808b3fd\") " 
pod="openshift-marketplace/redhat-operators-gwpxn" Dec 06 15:34:42 crc kubenswrapper[5003]: I1206 15:34:42.773417 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/158c5179-aee8-4e5b-8a39-38d19808b3fd-utilities\") pod \"redhat-operators-gwpxn\" (UID: \"158c5179-aee8-4e5b-8a39-38d19808b3fd\") " pod="openshift-marketplace/redhat-operators-gwpxn" Dec 06 15:34:42 crc kubenswrapper[5003]: I1206 15:34:42.814938 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-746fx\" (UniqueName: \"kubernetes.io/projected/158c5179-aee8-4e5b-8a39-38d19808b3fd-kube-api-access-746fx\") pod \"redhat-operators-gwpxn\" (UID: \"158c5179-aee8-4e5b-8a39-38d19808b3fd\") " pod="openshift-marketplace/redhat-operators-gwpxn" Dec 06 15:34:42 crc kubenswrapper[5003]: I1206 15:34:42.826475 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-b5rnp"] Dec 06 15:34:42 crc kubenswrapper[5003]: I1206 15:34:42.939075 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gwpxn" Dec 06 15:34:43 crc kubenswrapper[5003]: I1206 15:34:43.083102 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b5rnp" event={"ID":"67506930-842d-411a-b032-26874042995d","Type":"ContainerStarted","Data":"06944fdc684288d5d0a24af32d250c71a9d01e10e3cee142eb570d81a158e57f"} Dec 06 15:34:43 crc kubenswrapper[5003]: I1206 15:34:43.087903 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"de788a8f-3932-4514-8bc4-c808ef57661e","Type":"ContainerStarted","Data":"a8db46232c1ac18f1bd0193416121593b37a293b445ece362161144821916cbb"} Dec 06 15:34:43 crc kubenswrapper[5003]: I1206 15:34:43.087947 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"de788a8f-3932-4514-8bc4-c808ef57661e","Type":"ContainerStarted","Data":"5dcd2ead659749d338b96f32a981bba83c9b5d0f5da5f9b5da06f570c4cde4ce"} Dec 06 15:34:43 crc kubenswrapper[5003]: I1206 15:34:43.091747 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pzkrw" event={"ID":"b7b85911-8f34-416f-b3f9-2776b2aa7876","Type":"ContainerStarted","Data":"af743930f7f20344700da3a8be2d8efbc6ed7095a6c88c3923bd7817738db6bd"} Dec 06 15:34:43 crc kubenswrapper[5003]: I1206 15:34:43.103973 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mbw9t"] Dec 06 15:34:43 crc kubenswrapper[5003]: I1206 15:34:43.116511 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=3.116445843 podStartE2EDuration="3.116445843s" podCreationTimestamp="2025-12-06 15:34:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:34:43.109863046 +0000 UTC m=+161.643217457" watchObservedRunningTime="2025-12-06 15:34:43.116445843 +0000 UTC m=+161.649800244" Dec 06 15:34:43 crc kubenswrapper[5003]: W1206 15:34:43.230105 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7370c672_28ad_4228_8285_c113c6675ba8.slice/crio-328e2680dcd49b7f80a84e77f0fefb91e9a07a50d843e3172db1ce340c2fcd54 
Dec 06 15:34:43 crc kubenswrapper[5003]: I1206 15:34:43.486066 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gwpxn"]
Dec 06 15:34:43 crc kubenswrapper[5003]: W1206 15:34:43.539611 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod158c5179_aee8_4e5b_8a39_38d19808b3fd.slice/crio-e79778a80554c3eb28479826ee0b744470c9f5722b516f95e8f11e12949cf9ad WatchSource:0}: Error finding container e79778a80554c3eb28479826ee0b744470c9f5722b516f95e8f11e12949cf9ad: Status 404 returned error can't find the container with id e79778a80554c3eb28479826ee0b744470c9f5722b516f95e8f11e12949cf9ad
Dec 06 15:34:44 crc kubenswrapper[5003]: I1206 15:34:44.151091 5003 generic.go:334] "Generic (PLEG): container finished" podID="b7b85911-8f34-416f-b3f9-2776b2aa7876" containerID="1863ec88537c134f63306a8c8736febc2272e940da26a2eae6588201b34f5e67" exitCode=0
Dec 06 15:34:44 crc kubenswrapper[5003]: I1206 15:34:44.151213 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pzkrw" event={"ID":"b7b85911-8f34-416f-b3f9-2776b2aa7876","Type":"ContainerDied","Data":"1863ec88537c134f63306a8c8736febc2272e940da26a2eae6588201b34f5e67"}
Dec 06 15:34:44 crc kubenswrapper[5003]: I1206 15:34:44.156555 5003 generic.go:334] "Generic (PLEG): container finished" podID="158c5179-aee8-4e5b-8a39-38d19808b3fd" containerID="4850de3c644d58103f4e9cce2a671751bccf2a03c53b53d46feba76883276880" exitCode=0
Dec 06 15:34:44 crc kubenswrapper[5003]: I1206 15:34:44.156621 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gwpxn" event={"ID":"158c5179-aee8-4e5b-8a39-38d19808b3fd","Type":"ContainerDied","Data":"4850de3c644d58103f4e9cce2a671751bccf2a03c53b53d46feba76883276880"}
Dec 06 15:34:44 crc kubenswrapper[5003]: I1206 15:34:44.156647 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gwpxn" event={"ID":"158c5179-aee8-4e5b-8a39-38d19808b3fd","Type":"ContainerStarted","Data":"e79778a80554c3eb28479826ee0b744470c9f5722b516f95e8f11e12949cf9ad"}
Dec 06 15:34:44 crc kubenswrapper[5003]: I1206 15:34:44.158837 5003 generic.go:334] "Generic (PLEG): container finished" podID="7370c672-28ad-4228-8285-c113c6675ba8" containerID="7e8e69326ef21a317a3c14f1e2f1362c7e0f7e0232f2349288a852a406ca737c" exitCode=0
Dec 06 15:34:44 crc kubenswrapper[5003]: I1206 15:34:44.158883 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mbw9t" event={"ID":"7370c672-28ad-4228-8285-c113c6675ba8","Type":"ContainerDied","Data":"7e8e69326ef21a317a3c14f1e2f1362c7e0f7e0232f2349288a852a406ca737c"}
Dec 06 15:34:44 crc kubenswrapper[5003]: I1206 15:34:44.158902 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mbw9t" event={"ID":"7370c672-28ad-4228-8285-c113c6675ba8","Type":"ContainerStarted","Data":"328e2680dcd49b7f80a84e77f0fefb91e9a07a50d843e3172db1ce340c2fcd54"}
Dec 06 15:34:44 crc kubenswrapper[5003]: I1206 15:34:44.180777 5003 generic.go:334] "Generic (PLEG): container finished" podID="67506930-842d-411a-b032-26874042995d" containerID="945cf848ab3571079ee3e03dacb56cb9946d9ab9f2c6e1658097d0818e068eb9" exitCode=0
Dec 06 15:34:44 crc kubenswrapper[5003]: I1206 15:34:44.180861 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b5rnp" event={"ID":"67506930-842d-411a-b032-26874042995d","Type":"ContainerDied","Data":"945cf848ab3571079ee3e03dacb56cb9946d9ab9f2c6e1658097d0818e068eb9"}
Dec 06 15:34:44 crc kubenswrapper[5003]: I1206 15:34:44.203980 5003 generic.go:334] "Generic (PLEG): container finished" podID="de788a8f-3932-4514-8bc4-c808ef57661e" containerID="a8db46232c1ac18f1bd0193416121593b37a293b445ece362161144821916cbb" exitCode=0
Dec 06 15:34:44 crc kubenswrapper[5003]: I1206 15:34:44.204016 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"de788a8f-3932-4514-8bc4-c808ef57661e","Type":"ContainerDied","Data":"a8db46232c1ac18f1bd0193416121593b37a293b445ece362161144821916cbb"}
Dec 06 15:34:44 crc kubenswrapper[5003]: I1206 15:34:44.255131 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Dec 06 15:34:44 crc kubenswrapper[5003]: I1206 15:34:44.255801 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 06 15:34:44 crc kubenswrapper[5003]: I1206 15:34:44.259635 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Dec 06 15:34:44 crc kubenswrapper[5003]: I1206 15:34:44.260281 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Dec 06 15:34:44 crc kubenswrapper[5003]: I1206 15:34:44.261385 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Dec 06 15:34:44 crc kubenswrapper[5003]: I1206 15:34:44.342498 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0b5d45f6-ad6d-44a7-a125-eebc7e38a9b8-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"0b5d45f6-ad6d-44a7-a125-eebc7e38a9b8\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 06 15:34:44 crc kubenswrapper[5003]: I1206 15:34:44.342646 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b5d45f6-ad6d-44a7-a125-eebc7e38a9b8-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"0b5d45f6-ad6d-44a7-a125-eebc7e38a9b8\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 06 15:34:44 crc kubenswrapper[5003]: I1206 15:34:44.444086 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0b5d45f6-ad6d-44a7-a125-eebc7e38a9b8-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"0b5d45f6-ad6d-44a7-a125-eebc7e38a9b8\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 06 15:34:44 crc kubenswrapper[5003]: I1206 15:34:44.444191 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b5d45f6-ad6d-44a7-a125-eebc7e38a9b8-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"0b5d45f6-ad6d-44a7-a125-eebc7e38a9b8\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 06 15:34:44 crc kubenswrapper[5003]: I1206 15:34:44.444581 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0b5d45f6-ad6d-44a7-a125-eebc7e38a9b8-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"0b5d45f6-ad6d-44a7-a125-eebc7e38a9b8\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 06 15:34:44 crc kubenswrapper[5003]: I1206 15:34:44.495394 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b5d45f6-ad6d-44a7-a125-eebc7e38a9b8-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"0b5d45f6-ad6d-44a7-a125-eebc7e38a9b8\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 06 15:34:44 crc kubenswrapper[5003]: I1206 15:34:44.606751 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 06 15:34:44 crc kubenswrapper[5003]: I1206 15:34:44.979986 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Dec 06 15:34:45 crc kubenswrapper[5003]: W1206 15:34:45.007234 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod0b5d45f6_ad6d_44a7_a125_eebc7e38a9b8.slice/crio-617921a60557531ff587b64ae54ecdeca206a9597e4f4b934d8942c12f7c85f3 WatchSource:0}: Error finding container 617921a60557531ff587b64ae54ecdeca206a9597e4f4b934d8942c12f7c85f3: Status 404 returned error can't find the container with id 617921a60557531ff587b64ae54ecdeca206a9597e4f4b934d8942c12f7c85f3
Dec 06 15:34:45 crc kubenswrapper[5003]: I1206 15:34:45.226508 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"0b5d45f6-ad6d-44a7-a125-eebc7e38a9b8","Type":"ContainerStarted","Data":"617921a60557531ff587b64ae54ecdeca206a9597e4f4b934d8942c12f7c85f3"}
Dec 06 15:34:45 crc kubenswrapper[5003]: I1206 15:34:45.553099 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 06 15:34:45 crc kubenswrapper[5003]: I1206 15:34:45.573195 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-wx2w9"
Dec 06 15:34:45 crc kubenswrapper[5003]: I1206 15:34:45.686547 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/de788a8f-3932-4514-8bc4-c808ef57661e-kube-api-access\") pod \"de788a8f-3932-4514-8bc4-c808ef57661e\" (UID: \"de788a8f-3932-4514-8bc4-c808ef57661e\") "
Dec 06 15:34:45 crc kubenswrapper[5003]: I1206 15:34:45.686599 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/de788a8f-3932-4514-8bc4-c808ef57661e-kubelet-dir\") pod \"de788a8f-3932-4514-8bc4-c808ef57661e\" (UID: \"de788a8f-3932-4514-8bc4-c808ef57661e\") "
Dec 06 15:34:45 crc kubenswrapper[5003]: I1206 15:34:45.687917 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/de788a8f-3932-4514-8bc4-c808ef57661e-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "de788a8f-3932-4514-8bc4-c808ef57661e" (UID: "de788a8f-3932-4514-8bc4-c808ef57661e"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 06 15:34:45 crc kubenswrapper[5003]: I1206 15:34:45.705660 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de788a8f-3932-4514-8bc4-c808ef57661e-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "de788a8f-3932-4514-8bc4-c808ef57661e" (UID: "de788a8f-3932-4514-8bc4-c808ef57661e"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 06 15:34:45 crc kubenswrapper[5003]: I1206 15:34:45.788019 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/de788a8f-3932-4514-8bc4-c808ef57661e-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 06 15:34:45 crc kubenswrapper[5003]: I1206 15:34:45.788059 5003 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/de788a8f-3932-4514-8bc4-c808ef57661e-kubelet-dir\") on node \"crc\" DevicePath \"\""
Dec 06 15:34:46 crc kubenswrapper[5003]: I1206 15:34:46.258029 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"de788a8f-3932-4514-8bc4-c808ef57661e","Type":"ContainerDied","Data":"5dcd2ead659749d338b96f32a981bba83c9b5d0f5da5f9b5da06f570c4cde4ce"}
Dec 06 15:34:46 crc kubenswrapper[5003]: I1206 15:34:46.258783 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5dcd2ead659749d338b96f32a981bba83c9b5d0f5da5f9b5da06f570c4cde4ce"
Dec 06 15:34:46 crc kubenswrapper[5003]: I1206 15:34:46.259042 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 06 15:34:46 crc kubenswrapper[5003]: I1206 15:34:46.270395 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"0b5d45f6-ad6d-44a7-a125-eebc7e38a9b8","Type":"ContainerStarted","Data":"7e24734c3c76d48fba260d1a3b2a50a9bf639fa6a31f00523feae5967ccfa606"}
Dec 06 15:34:47 crc kubenswrapper[5003]: I1206 15:34:47.277573 5003 generic.go:334] "Generic (PLEG): container finished" podID="0b5d45f6-ad6d-44a7-a125-eebc7e38a9b8" containerID="7e24734c3c76d48fba260d1a3b2a50a9bf639fa6a31f00523feae5967ccfa606" exitCode=0
Dec 06 15:34:47 crc kubenswrapper[5003]: I1206 15:34:47.277624 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"0b5d45f6-ad6d-44a7-a125-eebc7e38a9b8","Type":"ContainerDied","Data":"7e24734c3c76d48fba260d1a3b2a50a9bf639fa6a31f00523feae5967ccfa606"}
Dec 06 15:34:48 crc kubenswrapper[5003]: I1206 15:34:48.572674 5003 patch_prober.go:28] interesting pod/machine-config-daemon-w25db container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 06 15:34:48 crc kubenswrapper[5003]: I1206 15:34:48.572944 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 06 15:34:48 crc kubenswrapper[5003]: I1206 15:34:48.746834 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 06 15:34:48 crc kubenswrapper[5003]: I1206 15:34:48.885414 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0b5d45f6-ad6d-44a7-a125-eebc7e38a9b8-kubelet-dir\") pod \"0b5d45f6-ad6d-44a7-a125-eebc7e38a9b8\" (UID: \"0b5d45f6-ad6d-44a7-a125-eebc7e38a9b8\") " Dec 06 15:34:48 crc kubenswrapper[5003]: I1206 15:34:48.885602 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b5d45f6-ad6d-44a7-a125-eebc7e38a9b8-kube-api-access\") pod \"0b5d45f6-ad6d-44a7-a125-eebc7e38a9b8\" (UID: \"0b5d45f6-ad6d-44a7-a125-eebc7e38a9b8\") " Dec 06 15:34:48 crc kubenswrapper[5003]: I1206 15:34:48.885667 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0b5d45f6-ad6d-44a7-a125-eebc7e38a9b8-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "0b5d45f6-ad6d-44a7-a125-eebc7e38a9b8" (UID: "0b5d45f6-ad6d-44a7-a125-eebc7e38a9b8"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 15:34:48 crc kubenswrapper[5003]: I1206 15:34:48.885932 5003 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0b5d45f6-ad6d-44a7-a125-eebc7e38a9b8-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 06 15:34:48 crc kubenswrapper[5003]: I1206 15:34:48.891373 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b5d45f6-ad6d-44a7-a125-eebc7e38a9b8-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b5d45f6-ad6d-44a7-a125-eebc7e38a9b8" (UID: "0b5d45f6-ad6d-44a7-a125-eebc7e38a9b8"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:34:48 crc kubenswrapper[5003]: I1206 15:34:48.987221 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b5d45f6-ad6d-44a7-a125-eebc7e38a9b8-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 06 15:34:49 crc kubenswrapper[5003]: I1206 15:34:49.189469 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9fa121e1-7f2f-4912-945f-86cb199c3014-metrics-certs\") pod \"network-metrics-daemon-jmzd9\" (UID: \"9fa121e1-7f2f-4912-945f-86cb199c3014\") " pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:34:49 crc kubenswrapper[5003]: I1206 15:34:49.208012 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9fa121e1-7f2f-4912-945f-86cb199c3014-metrics-certs\") pod \"network-metrics-daemon-jmzd9\" (UID: \"9fa121e1-7f2f-4912-945f-86cb199c3014\") " pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:34:49 crc kubenswrapper[5003]: I1206 15:34:49.335963 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"0b5d45f6-ad6d-44a7-a125-eebc7e38a9b8","Type":"ContainerDied","Data":"617921a60557531ff587b64ae54ecdeca206a9597e4f4b934d8942c12f7c85f3"} Dec 06 15:34:49 crc kubenswrapper[5003]: I1206 15:34:49.336004 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="617921a60557531ff587b64ae54ecdeca206a9597e4f4b934d8942c12f7c85f3" Dec 06 15:34:49 crc kubenswrapper[5003]: I1206 15:34:49.336063 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 06 15:34:49 crc kubenswrapper[5003]: I1206 15:34:49.438636 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-jmzd9" Dec 06 15:34:50 crc kubenswrapper[5003]: I1206 15:34:50.294409 5003 patch_prober.go:28] interesting pod/downloads-7954f5f757-xs4nd container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Dec 06 15:34:50 crc kubenswrapper[5003]: I1206 15:34:50.294461 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-xs4nd" podUID="495babf4-9201-4523-8a21-44e001d4f4c1" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Dec 06 15:34:50 crc kubenswrapper[5003]: I1206 15:34:50.295913 5003 patch_prober.go:28] interesting pod/downloads-7954f5f757-xs4nd container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Dec 06 15:34:50 crc kubenswrapper[5003]: I1206 15:34:50.295957 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-xs4nd" podUID="495babf4-9201-4523-8a21-44e001d4f4c1" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Dec 06 15:34:50 crc kubenswrapper[5003]: I1206 15:34:50.413370 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-npbcn" Dec 06 15:34:50 crc kubenswrapper[5003]: I1206 15:34:50.420866 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-npbcn" Dec 06 15:34:58 crc kubenswrapper[5003]: I1206 15:34:58.396701 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:35:00 crc kubenswrapper[5003]: I1206 15:35:00.305754 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-xs4nd" Dec 06 15:35:10 crc kubenswrapper[5003]: I1206 15:35:10.927558 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dc5hk" Dec 06 15:35:11 crc kubenswrapper[5003]: I1206 15:35:11.096784 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 15:35:17 crc kubenswrapper[5003]: I1206 15:35:17.828408 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 06 15:35:17 crc kubenswrapper[5003]: E1206 15:35:17.829175 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de788a8f-3932-4514-8bc4-c808ef57661e" containerName="pruner" Dec 06 15:35:17 crc kubenswrapper[5003]: I1206 15:35:17.829191 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="de788a8f-3932-4514-8bc4-c808ef57661e" containerName="pruner" Dec 06 15:35:17 crc kubenswrapper[5003]: E1206 15:35:17.829206 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b5d45f6-ad6d-44a7-a125-eebc7e38a9b8" containerName="pruner" Dec 06 15:35:17 crc kubenswrapper[5003]: I1206 15:35:17.829214 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b5d45f6-ad6d-44a7-a125-eebc7e38a9b8" 
containerName="pruner" Dec 06 15:35:17 crc kubenswrapper[5003]: I1206 15:35:17.829460 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="de788a8f-3932-4514-8bc4-c808ef57661e" containerName="pruner" Dec 06 15:35:17 crc kubenswrapper[5003]: I1206 15:35:17.829476 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b5d45f6-ad6d-44a7-a125-eebc7e38a9b8" containerName="pruner" Dec 06 15:35:17 crc kubenswrapper[5003]: I1206 15:35:17.831267 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 06 15:35:17 crc kubenswrapper[5003]: I1206 15:35:17.831995 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/21cd71a4-5614-4f8c-b9fd-a1ab2b21c363-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"21cd71a4-5614-4f8c-b9fd-a1ab2b21c363\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 06 15:35:17 crc kubenswrapper[5003]: I1206 15:35:17.832120 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/21cd71a4-5614-4f8c-b9fd-a1ab2b21c363-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"21cd71a4-5614-4f8c-b9fd-a1ab2b21c363\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 06 15:35:17 crc kubenswrapper[5003]: I1206 15:35:17.833139 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 06 15:35:17 crc kubenswrapper[5003]: I1206 15:35:17.834043 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 06 15:35:17 crc kubenswrapper[5003]: I1206 15:35:17.844646 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 06 15:35:17 crc kubenswrapper[5003]: I1206 15:35:17.941060 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/21cd71a4-5614-4f8c-b9fd-a1ab2b21c363-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"21cd71a4-5614-4f8c-b9fd-a1ab2b21c363\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 06 15:35:17 crc kubenswrapper[5003]: I1206 15:35:17.941589 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/21cd71a4-5614-4f8c-b9fd-a1ab2b21c363-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"21cd71a4-5614-4f8c-b9fd-a1ab2b21c363\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 06 15:35:17 crc kubenswrapper[5003]: I1206 15:35:17.941698 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/21cd71a4-5614-4f8c-b9fd-a1ab2b21c363-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"21cd71a4-5614-4f8c-b9fd-a1ab2b21c363\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 06 15:35:17 crc kubenswrapper[5003]: I1206 15:35:17.964257 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/21cd71a4-5614-4f8c-b9fd-a1ab2b21c363-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"21cd71a4-5614-4f8c-b9fd-a1ab2b21c363\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 06 15:35:18 crc kubenswrapper[5003]: I1206 15:35:18.153958 
5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 06 15:35:18 crc kubenswrapper[5003]: I1206 15:35:18.572803 5003 patch_prober.go:28] interesting pod/machine-config-daemon-w25db container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 15:35:18 crc kubenswrapper[5003]: I1206 15:35:18.573734 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 15:35:23 crc kubenswrapper[5003]: I1206 15:35:23.226989 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 06 15:35:23 crc kubenswrapper[5003]: I1206 15:35:23.228170 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 06 15:35:23 crc kubenswrapper[5003]: I1206 15:35:23.230985 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 06 15:35:23 crc kubenswrapper[5003]: I1206 15:35:23.327170 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7a65c8f8-e5cf-471d-bbf1-2c541164cdf8-kube-api-access\") pod \"installer-9-crc\" (UID: \"7a65c8f8-e5cf-471d-bbf1-2c541164cdf8\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 06 15:35:23 crc kubenswrapper[5003]: I1206 15:35:23.327227 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/7a65c8f8-e5cf-471d-bbf1-2c541164cdf8-var-lock\") pod \"installer-9-crc\" (UID: \"7a65c8f8-e5cf-471d-bbf1-2c541164cdf8\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 06 15:35:23 crc kubenswrapper[5003]: I1206 15:35:23.327256 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7a65c8f8-e5cf-471d-bbf1-2c541164cdf8-kubelet-dir\") pod \"installer-9-crc\" (UID: \"7a65c8f8-e5cf-471d-bbf1-2c541164cdf8\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 06 15:35:23 crc kubenswrapper[5003]: I1206 15:35:23.427899 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7a65c8f8-e5cf-471d-bbf1-2c541164cdf8-kube-api-access\") pod \"installer-9-crc\" (UID: \"7a65c8f8-e5cf-471d-bbf1-2c541164cdf8\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 06 15:35:23 crc kubenswrapper[5003]: I1206 15:35:23.427967 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/7a65c8f8-e5cf-471d-bbf1-2c541164cdf8-var-lock\") pod \"installer-9-crc\" (UID: \"7a65c8f8-e5cf-471d-bbf1-2c541164cdf8\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 06 15:35:23 crc kubenswrapper[5003]: I1206 15:35:23.427997 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: 
\"kubernetes.io/host-path/7a65c8f8-e5cf-471d-bbf1-2c541164cdf8-kubelet-dir\") pod \"installer-9-crc\" (UID: \"7a65c8f8-e5cf-471d-bbf1-2c541164cdf8\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 06 15:35:23 crc kubenswrapper[5003]: I1206 15:35:23.428105 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7a65c8f8-e5cf-471d-bbf1-2c541164cdf8-kubelet-dir\") pod \"installer-9-crc\" (UID: \"7a65c8f8-e5cf-471d-bbf1-2c541164cdf8\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 06 15:35:23 crc kubenswrapper[5003]: I1206 15:35:23.428198 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/7a65c8f8-e5cf-471d-bbf1-2c541164cdf8-var-lock\") pod \"installer-9-crc\" (UID: \"7a65c8f8-e5cf-471d-bbf1-2c541164cdf8\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 06 15:35:23 crc kubenswrapper[5003]: I1206 15:35:23.450593 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7a65c8f8-e5cf-471d-bbf1-2c541164cdf8-kube-api-access\") pod \"installer-9-crc\" (UID: \"7a65c8f8-e5cf-471d-bbf1-2c541164cdf8\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 06 15:35:23 crc kubenswrapper[5003]: I1206 15:35:23.545931 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 06 15:35:24 crc kubenswrapper[5003]: E1206 15:35:24.952565 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 06 15:35:24 crc kubenswrapper[5003]: E1206 15:35:24.952959 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qbncs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-mbw9t_openshift-marketplace(7370c672-28ad-4228-8285-c113c6675ba8): ErrImagePull: 
rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 06 15:35:24 crc kubenswrapper[5003]: E1206 15:35:24.954230 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-mbw9t" podUID="7370c672-28ad-4228-8285-c113c6675ba8" Dec 06 15:35:26 crc kubenswrapper[5003]: E1206 15:35:26.710078 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-mbw9t" podUID="7370c672-28ad-4228-8285-c113c6675ba8" Dec 06 15:35:26 crc kubenswrapper[5003]: E1206 15:35:26.793435 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 06 15:35:26 crc kubenswrapper[5003]: E1206 15:35:26.793598 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zw5mx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-jqxj7_openshift-marketplace(dde2226a-d12b-4c3b-a396-cf72781488ca): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 06 15:35:26 crc kubenswrapper[5003]: E1206 15:35:26.794733 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-jqxj7" podUID="dde2226a-d12b-4c3b-a396-cf72781488ca" 
Dec 06 15:35:26 crc kubenswrapper[5003]: E1206 15:35:26.811966 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 06 15:35:26 crc kubenswrapper[5003]: E1206 15:35:26.812144 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-746fx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-gwpxn_openshift-marketplace(158c5179-aee8-4e5b-8a39-38d19808b3fd): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 06 15:35:26 crc kubenswrapper[5003]: E1206 15:35:26.815340 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-gwpxn" podUID="158c5179-aee8-4e5b-8a39-38d19808b3fd" Dec 06 15:35:27 crc kubenswrapper[5003]: E1206 15:35:27.853192 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-jqxj7" podUID="dde2226a-d12b-4c3b-a396-cf72781488ca" Dec 06 15:35:27 crc kubenswrapper[5003]: E1206 15:35:27.853287 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-gwpxn" podUID="158c5179-aee8-4e5b-8a39-38d19808b3fd" Dec 06 15:35:27 crc kubenswrapper[5003]: E1206 15:35:27.917807 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = 
copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 06 15:35:27 crc kubenswrapper[5003]: E1206 15:35:27.917988 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-w69jz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-b5rnp_openshift-marketplace(67506930-842d-411a-b032-26874042995d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 06 15:35:27 crc kubenswrapper[5003]: E1206 15:35:27.919224 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-b5rnp" podUID="67506930-842d-411a-b032-26874042995d" Dec 06 15:35:29 crc kubenswrapper[5003]: E1206 15:35:29.354640 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-b5rnp" podUID="67506930-842d-411a-b032-26874042995d" Dec 06 15:35:29 crc kubenswrapper[5003]: E1206 15:35:29.423998 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 06 15:35:29 crc kubenswrapper[5003]: E1206 15:35:29.424288 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tsk7n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-x2bhv_openshift-marketplace(a73c8333-dbdd-447b-b940-5aca2f15d00d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 06 15:35:29 crc kubenswrapper[5003]: E1206 15:35:29.425467 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-x2bhv" podUID="a73c8333-dbdd-447b-b940-5aca2f15d00d" Dec 06 15:35:29 crc kubenswrapper[5003]: E1206 15:35:29.440988 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 06 15:35:29 crc kubenswrapper[5003]: E1206 15:35:29.441175 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mq5bv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-pzkrw_openshift-marketplace(b7b85911-8f34-416f-b3f9-2776b2aa7876): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 06 15:35:29 crc kubenswrapper[5003]: E1206 15:35:29.442322 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-pzkrw" podUID="b7b85911-8f34-416f-b3f9-2776b2aa7876" Dec 06 15:35:29 crc kubenswrapper[5003]: E1206 15:35:29.490038 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 06 15:35:29 crc kubenswrapper[5003]: E1206 15:35:29.490202 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-d2scn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-x4xqj_openshift-marketplace(78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 06 15:35:29 crc kubenswrapper[5003]: E1206 15:35:29.491940 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-x4xqj" podUID="78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4" Dec 06 15:35:29 crc kubenswrapper[5003]: E1206 15:35:29.494044 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 06 15:35:29 crc kubenswrapper[5003]: E1206 15:35:29.494440 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-d788v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-gtwdh_openshift-marketplace(6c5a30dc-06ca-435f-81b9-576f03f05a19): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 06 15:35:29 crc kubenswrapper[5003]: E1206 15:35:29.495717 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-gtwdh" podUID="6c5a30dc-06ca-435f-81b9-576f03f05a19" Dec 06 15:35:29 crc kubenswrapper[5003]: E1206 15:35:29.708932 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-gtwdh" podUID="6c5a30dc-06ca-435f-81b9-576f03f05a19" Dec 06 15:35:29 crc kubenswrapper[5003]: E1206 15:35:29.709153 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-x4xqj" podUID="78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4" Dec 06 15:35:29 crc kubenswrapper[5003]: E1206 15:35:29.709202 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-pzkrw" podUID="b7b85911-8f34-416f-b3f9-2776b2aa7876" Dec 06 15:35:29 crc kubenswrapper[5003]: E1206 15:35:29.709803 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-x2bhv" podUID="a73c8333-dbdd-447b-b940-5aca2f15d00d" Dec 06 
15:35:29 crc kubenswrapper[5003]: I1206 15:35:29.824193 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 06 15:35:29 crc kubenswrapper[5003]: I1206 15:35:29.875364 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-jmzd9"] Dec 06 15:35:29 crc kubenswrapper[5003]: W1206 15:35:29.901842 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9fa121e1_7f2f_4912_945f_86cb199c3014.slice/crio-f07c12687e9c233d96aac9f13190c4decd9a44a8a0bbf36164fc46e0ac0f7abc WatchSource:0}: Error finding container f07c12687e9c233d96aac9f13190c4decd9a44a8a0bbf36164fc46e0ac0f7abc: Status 404 returned error can't find the container with id f07c12687e9c233d96aac9f13190c4decd9a44a8a0bbf36164fc46e0ac0f7abc Dec 06 15:35:29 crc kubenswrapper[5003]: I1206 15:35:29.913789 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 06 15:35:30 crc kubenswrapper[5003]: I1206 15:35:30.716420 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-jmzd9" event={"ID":"9fa121e1-7f2f-4912-945f-86cb199c3014","Type":"ContainerStarted","Data":"ed5bc4434dcc5a1904b8c10ba2de06e4d7d8ff986f4ff2053d2e1259740761c0"} Dec 06 15:35:30 crc kubenswrapper[5003]: I1206 15:35:30.717335 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-jmzd9" event={"ID":"9fa121e1-7f2f-4912-945f-86cb199c3014","Type":"ContainerStarted","Data":"f07c12687e9c233d96aac9f13190c4decd9a44a8a0bbf36164fc46e0ac0f7abc"} Dec 06 15:35:30 crc kubenswrapper[5003]: I1206 15:35:30.722164 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"21cd71a4-5614-4f8c-b9fd-a1ab2b21c363","Type":"ContainerStarted","Data":"bbb4a1fb5c5b5a5734e0ff9b85a62aad7c2aba3d19e2412a09c4fbaf45ccbf16"} Dec 06 15:35:30 crc kubenswrapper[5003]: I1206 15:35:30.722235 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"21cd71a4-5614-4f8c-b9fd-a1ab2b21c363","Type":"ContainerStarted","Data":"26c80b4ab5dfdd3c3506503d37cb2f61dfe45b6f182c05f86d2f95b475f4ecbc"} Dec 06 15:35:30 crc kubenswrapper[5003]: I1206 15:35:30.726993 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"7a65c8f8-e5cf-471d-bbf1-2c541164cdf8","Type":"ContainerStarted","Data":"a7ec154a4d732cd6f0371e157fa8ff6aed0c56803b19e90d35a0257a34967e5e"} Dec 06 15:35:30 crc kubenswrapper[5003]: I1206 15:35:30.727049 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"7a65c8f8-e5cf-471d-bbf1-2c541164cdf8","Type":"ContainerStarted","Data":"60b34747c08e62f951fb6f31f8923a5aedeedbd97bda433dac45235b8626b41a"} Dec 06 15:35:30 crc kubenswrapper[5003]: I1206 15:35:30.750025 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=13.750004389 podStartE2EDuration="13.750004389s" podCreationTimestamp="2025-12-06 15:35:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:35:30.746099985 +0000 UTC m=+209.279454406" watchObservedRunningTime="2025-12-06 15:35:30.750004389 +0000 UTC m=+209.283358800" Dec 06 
15:35:30 crc kubenswrapper[5003]: I1206 15:35:30.765339 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=7.765321067 podStartE2EDuration="7.765321067s" podCreationTimestamp="2025-12-06 15:35:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:35:30.763835567 +0000 UTC m=+209.297189948" watchObservedRunningTime="2025-12-06 15:35:30.765321067 +0000 UTC m=+209.298675478"
Dec 06 15:35:31 crc kubenswrapper[5003]: I1206 15:35:31.734423 5003 generic.go:334] "Generic (PLEG): container finished" podID="21cd71a4-5614-4f8c-b9fd-a1ab2b21c363" containerID="bbb4a1fb5c5b5a5734e0ff9b85a62aad7c2aba3d19e2412a09c4fbaf45ccbf16" exitCode=0
Dec 06 15:35:31 crc kubenswrapper[5003]: I1206 15:35:31.734535 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"21cd71a4-5614-4f8c-b9fd-a1ab2b21c363","Type":"ContainerDied","Data":"bbb4a1fb5c5b5a5734e0ff9b85a62aad7c2aba3d19e2412a09c4fbaf45ccbf16"}
Dec 06 15:35:31 crc kubenswrapper[5003]: I1206 15:35:31.736715 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-jmzd9" event={"ID":"9fa121e1-7f2f-4912-945f-86cb199c3014","Type":"ContainerStarted","Data":"229902feebcdf524aca54c49ead57005f4cd2c6ec4eb9f59a980f5bde360282c"}
Dec 06 15:35:31 crc kubenswrapper[5003]: I1206 15:35:31.764462 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-jmzd9" podStartSLOduration=185.76444713 podStartE2EDuration="3m5.76444713s" podCreationTimestamp="2025-12-06 15:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:35:31.763398461 +0000 UTC m=+210.296752862" watchObservedRunningTime="2025-12-06 15:35:31.76444713 +0000 UTC m=+210.297801511"
Dec 06 15:35:32 crc kubenswrapper[5003]: I1206 15:35:32.966075 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 06 15:35:33 crc kubenswrapper[5003]: I1206 15:35:33.058387 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/21cd71a4-5614-4f8c-b9fd-a1ab2b21c363-kube-api-access\") pod \"21cd71a4-5614-4f8c-b9fd-a1ab2b21c363\" (UID: \"21cd71a4-5614-4f8c-b9fd-a1ab2b21c363\") "
Dec 06 15:35:33 crc kubenswrapper[5003]: I1206 15:35:33.058739 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/21cd71a4-5614-4f8c-b9fd-a1ab2b21c363-kubelet-dir\") pod \"21cd71a4-5614-4f8c-b9fd-a1ab2b21c363\" (UID: \"21cd71a4-5614-4f8c-b9fd-a1ab2b21c363\") "
Dec 06 15:35:33 crc kubenswrapper[5003]: I1206 15:35:33.058860 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/21cd71a4-5614-4f8c-b9fd-a1ab2b21c363-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "21cd71a4-5614-4f8c-b9fd-a1ab2b21c363" (UID: "21cd71a4-5614-4f8c-b9fd-a1ab2b21c363"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 06 15:35:33 crc kubenswrapper[5003]: I1206 15:35:33.059026 5003 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/21cd71a4-5614-4f8c-b9fd-a1ab2b21c363-kubelet-dir\") on node \"crc\" DevicePath \"\""
Dec 06 15:35:33 crc kubenswrapper[5003]: I1206 15:35:33.065310 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21cd71a4-5614-4f8c-b9fd-a1ab2b21c363-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "21cd71a4-5614-4f8c-b9fd-a1ab2b21c363" (UID: "21cd71a4-5614-4f8c-b9fd-a1ab2b21c363"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 06 15:35:33 crc kubenswrapper[5003]: I1206 15:35:33.163179 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/21cd71a4-5614-4f8c-b9fd-a1ab2b21c363-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 06 15:35:33 crc kubenswrapper[5003]: I1206 15:35:33.749558 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"21cd71a4-5614-4f8c-b9fd-a1ab2b21c363","Type":"ContainerDied","Data":"26c80b4ab5dfdd3c3506503d37cb2f61dfe45b6f182c05f86d2f95b475f4ecbc"}
Dec 06 15:35:33 crc kubenswrapper[5003]: I1206 15:35:33.749841 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="26c80b4ab5dfdd3c3506503d37cb2f61dfe45b6f182c05f86d2f95b475f4ecbc"
Dec 06 15:35:33 crc kubenswrapper[5003]: I1206 15:35:33.749808 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 06 15:35:44 crc kubenswrapper[5003]: I1206 15:35:44.811258 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gwpxn" event={"ID":"158c5179-aee8-4e5b-8a39-38d19808b3fd","Type":"ContainerStarted","Data":"81056a0f7fdeec459dbb4a85d703e18881bd9542ae62284cf33f2b4f02ceecd5"}
Dec 06 15:35:44 crc kubenswrapper[5003]: I1206 15:35:44.812927 5003 generic.go:334] "Generic (PLEG): container finished" podID="dde2226a-d12b-4c3b-a396-cf72781488ca" containerID="ed8aaaf11b223ff3ddd1e723f46807be41455a65ce0c349a8c1d7ed2c322f105" exitCode=0
Dec 06 15:35:44 crc kubenswrapper[5003]: I1206 15:35:44.812978 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jqxj7" event={"ID":"dde2226a-d12b-4c3b-a396-cf72781488ca","Type":"ContainerDied","Data":"ed8aaaf11b223ff3ddd1e723f46807be41455a65ce0c349a8c1d7ed2c322f105"}
Dec 06 15:35:44 crc kubenswrapper[5003]: I1206 15:35:44.817197 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mbw9t" event={"ID":"7370c672-28ad-4228-8285-c113c6675ba8","Type":"ContainerStarted","Data":"87f7a0f1c7eacadd76d10b88264818cc1bb8b5a5d5fae61a0843d9bf22575f2f"}
Dec 06 15:35:44 crc kubenswrapper[5003]: I1206 15:35:44.820633 5003 generic.go:334] "Generic (PLEG): container finished" podID="67506930-842d-411a-b032-26874042995d" containerID="dfc4979fa8c3b5bab62b723ac7719bdbbc226ca085488ff2b437bf4809997b3d" exitCode=0
Dec 06 15:35:44 crc kubenswrapper[5003]: I1206 15:35:44.820706 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b5rnp" event={"ID":"67506930-842d-411a-b032-26874042995d","Type":"ContainerDied","Data":"dfc4979fa8c3b5bab62b723ac7719bdbbc226ca085488ff2b437bf4809997b3d"}
Dec 06 15:35:44 crc kubenswrapper[5003]: I1206 15:35:44.823753 5003 generic.go:334] "Generic (PLEG): container finished" podID="78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4" containerID="aafd5ddef69565d6ba10d6c654d0969bcc3e7d91226b4e25b63cfee57341700b" exitCode=0
Dec 06 15:35:44 crc kubenswrapper[5003]: I1206 15:35:44.823839 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x4xqj" event={"ID":"78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4","Type":"ContainerDied","Data":"aafd5ddef69565d6ba10d6c654d0969bcc3e7d91226b4e25b63cfee57341700b"}
Dec 06 15:35:44 crc kubenswrapper[5003]: I1206 15:35:44.827870 5003 generic.go:334] "Generic (PLEG): container finished" podID="b7b85911-8f34-416f-b3f9-2776b2aa7876" containerID="294267902fc5c1a281b441b90ad4ff374c9feeb95876c885957e4ae654807eb7" exitCode=0
Dec 06 15:35:44 crc kubenswrapper[5003]: I1206 15:35:44.827932 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pzkrw" event={"ID":"b7b85911-8f34-416f-b3f9-2776b2aa7876","Type":"ContainerDied","Data":"294267902fc5c1a281b441b90ad4ff374c9feeb95876c885957e4ae654807eb7"}
Dec 06 15:35:44 crc kubenswrapper[5003]: I1206 15:35:44.833006 5003 generic.go:334] "Generic (PLEG): container finished" podID="a73c8333-dbdd-447b-b940-5aca2f15d00d" containerID="e8282c0d23957b5f2c04699a19f54f397f94a6301ae942de9cbc4319a4bfbd9f" exitCode=0
Dec 06 15:35:44 crc kubenswrapper[5003]: I1206 15:35:44.833123 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x2bhv" event={"ID":"a73c8333-dbdd-447b-b940-5aca2f15d00d","Type":"ContainerDied","Data":"e8282c0d23957b5f2c04699a19f54f397f94a6301ae942de9cbc4319a4bfbd9f"}
Dec 06 15:35:45 crc kubenswrapper[5003]: I1206 15:35:45.847056 5003 generic.go:334] "Generic (PLEG): container finished" podID="158c5179-aee8-4e5b-8a39-38d19808b3fd" containerID="81056a0f7fdeec459dbb4a85d703e18881bd9542ae62284cf33f2b4f02ceecd5" exitCode=0
Dec 06 15:35:45 crc kubenswrapper[5003]: I1206 15:35:45.847136 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gwpxn" event={"ID":"158c5179-aee8-4e5b-8a39-38d19808b3fd","Type":"ContainerDied","Data":"81056a0f7fdeec459dbb4a85d703e18881bd9542ae62284cf33f2b4f02ceecd5"}
Dec 06 15:35:45 crc kubenswrapper[5003]: I1206 15:35:45.849923 5003 generic.go:334] "Generic (PLEG): container finished" podID="7370c672-28ad-4228-8285-c113c6675ba8" containerID="87f7a0f1c7eacadd76d10b88264818cc1bb8b5a5d5fae61a0843d9bf22575f2f" exitCode=0
Dec 06 15:35:45 crc kubenswrapper[5003]: I1206 15:35:45.849985 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mbw9t" event={"ID":"7370c672-28ad-4228-8285-c113c6675ba8","Type":"ContainerDied","Data":"87f7a0f1c7eacadd76d10b88264818cc1bb8b5a5d5fae61a0843d9bf22575f2f"}
Dec 06 15:35:46 crc kubenswrapper[5003]: I1206 15:35:46.857661 5003 generic.go:334] "Generic (PLEG): container finished" podID="6c5a30dc-06ca-435f-81b9-576f03f05a19" containerID="f905ccfbc05a78b505e1d01b71a3d3802d9fc897e9de695484f99225a1098306" exitCode=0
Dec 06 15:35:46 crc kubenswrapper[5003]: I1206 15:35:46.857748 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gtwdh" event={"ID":"6c5a30dc-06ca-435f-81b9-576f03f05a19","Type":"ContainerDied","Data":"f905ccfbc05a78b505e1d01b71a3d3802d9fc897e9de695484f99225a1098306"}
Dec 06 15:35:46 crc kubenswrapper[5003]: I1206 15:35:46.861551 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jqxj7" event={"ID":"dde2226a-d12b-4c3b-a396-cf72781488ca","Type":"ContainerStarted","Data":"e7a3ca9c9cad0d324fec3f7433c330d72ce5181cd384e7d96107998891afba2d"}
Dec 06 15:35:47 crc kubenswrapper[5003]: I1206 15:35:47.895845 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-jqxj7" podStartSLOduration=4.720978833 podStartE2EDuration="1m9.895824247s" podCreationTimestamp="2025-12-06 15:34:38 +0000 UTC" firstStartedPulling="2025-12-06 15:34:40.617393444 +0000 UTC m=+159.150747825" lastFinishedPulling="2025-12-06 15:35:45.792238857 +0000 UTC m=+224.325593239" observedRunningTime="2025-12-06 15:35:47.892862209 +0000 UTC m=+226.426216600" watchObservedRunningTime="2025-12-06 15:35:47.895824247 +0000 UTC m=+226.429178628"
Dec 06 15:35:48 crc kubenswrapper[5003]: I1206 15:35:48.576098 5003 patch_prober.go:28] interesting pod/machine-config-daemon-w25db container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 06 15:35:48 crc kubenswrapper[5003]: I1206 15:35:48.576178 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 06 15:35:48 crc kubenswrapper[5003]: I1206 15:35:48.576241 5003 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w25db"
Dec 06 15:35:48 crc kubenswrapper[5003]: I1206 15:35:48.578335 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba"} pod="openshift-machine-config-operator/machine-config-daemon-w25db" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 06 15:35:48 crc kubenswrapper[5003]: I1206 15:35:48.578466 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" containerName="machine-config-daemon" containerID="cri-o://b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba" gracePeriod=600
Dec 06 15:35:49 crc kubenswrapper[5003]: I1206 15:35:49.439843 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-tsg4h"]
Dec 06 15:35:49 crc kubenswrapper[5003]: I1206 15:35:49.890005 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pzkrw" event={"ID":"b7b85911-8f34-416f-b3f9-2776b2aa7876","Type":"ContainerStarted","Data":"cee0fa2fc0e2e8e2da2c83ec1a8892ba4b24e6caa9e9d00f2efa10d023de603f"}
Dec 06 15:35:49 crc kubenswrapper[5003]: I1206 15:35:49.894337 5003 generic.go:334] "Generic (PLEG): container finished" podID="1a047c4d-003e-4668-9b96-945eab34ab68" containerID="b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba" exitCode=0
Dec 06 15:35:49 crc kubenswrapper[5003]: I1206 15:35:49.894384 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" event={"ID":"1a047c4d-003e-4668-9b96-945eab34ab68","Type":"ContainerDied","Data":"b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba"}
Dec 06 15:35:49 crc kubenswrapper[5003]: I1206 15:35:49.921247 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pzkrw" podStartSLOduration=5.564787484 podStartE2EDuration="1m8.921225425s" podCreationTimestamp="2025-12-06 15:34:41 +0000 UTC" firstStartedPulling="2025-12-06 15:34:44.178593753 +0000 UTC m=+162.711948134" lastFinishedPulling="2025-12-06 15:35:47.535031694 +0000 UTC m=+226.068386075" observedRunningTime="2025-12-06 15:35:49.917534127 +0000 UTC m=+228.450888518" watchObservedRunningTime="2025-12-06 15:35:49.921225425 +0000 UTC m=+228.454579806"
Dec 06 15:35:50 crc kubenswrapper[5003]: I1206 15:35:50.025772 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-jqxj7"
Dec 06 15:35:50 crc kubenswrapper[5003]: I1206 15:35:50.025823 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-jqxj7"
Dec 06 15:35:51 crc kubenswrapper[5003]: I1206 15:35:51.329298 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-jqxj7" podUID="dde2226a-d12b-4c3b-a396-cf72781488ca" containerName="registry-server" probeResult="failure" output=<
Dec 06 15:35:51 crc kubenswrapper[5003]: timeout: failed to connect service ":50051" within 1s
Dec 06 15:35:51 crc kubenswrapper[5003]: >
Dec 06 15:35:51 crc kubenswrapper[5003]: I1206 15:35:51.659525 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-pzkrw"
Dec 06 15:35:51 crc kubenswrapper[5003]: I1206 15:35:51.659580 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-pzkrw"
Dec 06 15:35:52 crc kubenswrapper[5003]: I1206 15:35:52.025625 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pzkrw"
Dec 06 15:35:52 crc kubenswrapper[5003]: I1206 15:35:52.916351 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x2bhv" event={"ID":"a73c8333-dbdd-447b-b940-5aca2f15d00d","Type":"ContainerStarted","Data":"28312340308c6c2ad9ff05fda8438cab72ec2637a9e8248893c070f3d28ed811"}
Dec 06 15:35:52 crc kubenswrapper[5003]: I1206 15:35:52.918710 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" event={"ID":"1a047c4d-003e-4668-9b96-945eab34ab68","Type":"ContainerStarted","Data":"1af03e52d38341c59b3fe6c255d6e745c45e38f27243b62c0f773f1214c39c22"}
Dec 06 15:35:53 crc kubenswrapper[5003]: I1206 15:35:53.944608 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-x2bhv" podStartSLOduration=5.306204243 podStartE2EDuration="1m14.94459196s" podCreationTimestamp="2025-12-06 15:34:39 +0000 UTC" firstStartedPulling="2025-12-06 15:34:40.610430847 +0000 UTC m=+159.143785238" lastFinishedPulling="2025-12-06 15:35:50.248818574 +0000 UTC m=+228.782172955" observedRunningTime="2025-12-06 15:35:53.943900752 +0000 UTC m=+232.477255153" watchObservedRunningTime="2025-12-06 15:35:53.94459196 +0000 UTC m=+232.477946341"
Dec 06 15:35:55 crc kubenswrapper[5003]: I1206 15:35:55.975232 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gwpxn" event={"ID":"158c5179-aee8-4e5b-8a39-38d19808b3fd","Type":"ContainerStarted","Data":"0c74c29d9df46fb3d47a5b5adb74059b162b5cff305b3b5845b3876bd7ae87f0"}
Dec 06 15:35:56 crc kubenswrapper[5003]: I1206 15:35:56.042467 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mbw9t" event={"ID":"7370c672-28ad-4228-8285-c113c6675ba8","Type":"ContainerStarted","Data":"a6e05613c5b9fca2c80befa1b17e35f07d84b3f182346f87fdc15363be8df4f4"}
Dec 06 15:35:56 crc kubenswrapper[5003]: I1206 15:35:56.054424 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b5rnp" event={"ID":"67506930-842d-411a-b032-26874042995d","Type":"ContainerStarted","Data":"4ecc296f8d469b13572f3088613c6f5307d365952b1d0bae6750bc39ee54f433"}
Dec 06 15:35:56 crc kubenswrapper[5003]: I1206 15:35:56.063694 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x4xqj" event={"ID":"78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4","Type":"ContainerStarted","Data":"ef958b7c591597101d3db38b2c6ffd23bbc2b747c581e47af384853dea5b0c20"}
Dec 06 15:35:56 crc kubenswrapper[5003]: I1206 15:35:56.066013 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gtwdh" event={"ID":"6c5a30dc-06ca-435f-81b9-576f03f05a19","Type":"ContainerStarted","Data":"e4b5fb7a09cbb912f18b48dddb3dffa5249d76b83fcfc068a3c747f22b2389a3"}
Dec 06 15:35:56 crc kubenswrapper[5003]: I1206 15:35:56.071664 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-gwpxn" podStartSLOduration=3.093543702 podStartE2EDuration="1m14.071648657s" podCreationTimestamp="2025-12-06 15:34:42 +0000 UTC" firstStartedPulling="2025-12-06 15:34:44.173124307 +0000 UTC m=+162.706478688" lastFinishedPulling="2025-12-06 15:35:55.151229262 +0000 UTC m=+233.684583643" observedRunningTime="2025-12-06 15:35:56.048012508 +0000 UTC m=+234.581366909" watchObservedRunningTime="2025-12-06 15:35:56.071648657 +0000 UTC m=+234.605003028"
Dec 06 15:35:56 crc kubenswrapper[5003]: I1206 15:35:56.072280 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mbw9t" podStartSLOduration=3.306924312 podStartE2EDuration="1m14.072275234s" podCreationTimestamp="2025-12-06 15:34:42 +0000 UTC" firstStartedPulling="2025-12-06 15:34:44.199324849 +0000 UTC m=+162.732679230" lastFinishedPulling="2025-12-06 15:35:54.964675751 +0000 UTC m=+233.498030152" observedRunningTime="2025-12-06 15:35:56.069794648 +0000 UTC m=+234.603149029" watchObservedRunningTime="2025-12-06 15:35:56.072275234 +0000 UTC m=+234.605629615"
Dec 06 15:35:56 crc kubenswrapper[5003]: I1206 15:35:56.086830 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-gtwdh" podStartSLOduration=2.389023644 podStartE2EDuration="1m17.086814762s" podCreationTimestamp="2025-12-06 15:34:39 +0000 UTC" firstStartedPulling="2025-12-06 15:34:40.605762493 +0000 UTC m=+159.139116874" lastFinishedPulling="2025-12-06 15:35:55.303553611 +0000 UTC m=+233.836907992" observedRunningTime="2025-12-06 15:35:56.085174608 +0000 UTC m=+234.618528999" watchObservedRunningTime="2025-12-06 15:35:56.086814762 +0000 UTC m=+234.620169143"
Dec 06 15:35:56 crc kubenswrapper[5003]: I1206 15:35:56.100746 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-x4xqj" podStartSLOduration=3.845925704 podStartE2EDuration="1m17.100731073s" podCreationTimestamp="2025-12-06 15:34:39 +0000 UTC" firstStartedPulling="2025-12-06 15:34:42.088294085 +0000 UTC m=+160.621648456" lastFinishedPulling="2025-12-06 15:35:55.343099444 +0000 UTC m=+233.876453825" observedRunningTime="2025-12-06 15:35:56.098325128 +0000 UTC m=+234.631679509" watchObservedRunningTime="2025-12-06 15:35:56.100731073 +0000 UTC m=+234.634085454"
Dec 06 15:35:56 crc kubenswrapper[5003]: I1206 15:35:56.125788 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-b5rnp" podStartSLOduration=4.982406587 podStartE2EDuration="1m16.1257688s" podCreationTimestamp="2025-12-06 15:34:40 +0000 UTC" firstStartedPulling="2025-12-06 15:34:44.199732661 +0000 UTC m=+162.733087042" lastFinishedPulling="2025-12-06 15:35:55.343094874 +0000 UTC m=+233.876449255" observedRunningTime="2025-12-06 15:35:56.124758422 +0000 UTC m=+234.658112813" watchObservedRunningTime="2025-12-06 15:35:56.1257688 +0000 UTC m=+234.659123191"
Dec 06 15:35:59 crc kubenswrapper[5003]: I1206 15:35:59.511466 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-gtwdh"
Dec 06 15:35:59 crc kubenswrapper[5003]: I1206 15:35:59.511813 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-gtwdh"
Dec 06 15:35:59 crc kubenswrapper[5003]: I1206 15:35:59.596642 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-gtwdh"
Dec 06 15:35:59 crc kubenswrapper[5003]: I1206 15:35:59.877662 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-x2bhv"
Dec 06 15:35:59 crc kubenswrapper[5003]: I1206 15:35:59.879039 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-x2bhv"
Dec 06 15:35:59 crc kubenswrapper[5003]: I1206 15:35:59.927134 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-x2bhv"
Dec 06 15:36:00 crc kubenswrapper[5003]: I1206 15:36:00.027575 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-x4xqj"
Dec 06 15:36:00 crc kubenswrapper[5003]: I1206 15:36:00.027624 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-x4xqj"
Dec 06 15:36:00 crc kubenswrapper[5003]: I1206 15:36:00.084870 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-x4xqj"
Dec 06 15:36:00 crc kubenswrapper[5003]: I1206 15:36:00.100040 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-jqxj7"
Dec 06 15:36:00 crc kubenswrapper[5003]: I1206 15:36:00.127227 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-gtwdh"
Dec 06 15:36:00 crc kubenswrapper[5003]: I1206 15:36:00.144017 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-x4xqj"
Dec 06 15:36:00 crc kubenswrapper[5003]: I1206 15:36:00.156818 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-jqxj7"
Dec 06 15:36:00 crc kubenswrapper[5003]: I1206 15:36:00.159701 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-x2bhv"
Dec 06 15:36:01 crc kubenswrapper[5003]: I1206 15:36:01.237871 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-b5rnp"
Dec 06 15:36:01 crc kubenswrapper[5003]: I1206 15:36:01.239109 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-b5rnp"
Dec 06 15:36:01 crc kubenswrapper[5003]: I1206 15:36:01.290095 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-b5rnp"
Dec 06 15:36:01 crc kubenswrapper[5003]: I1206 15:36:01.745774 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-pzkrw"
Dec 06 15:36:02 crc kubenswrapper[5003]: I1206 15:36:02.142734 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-b5rnp"
Dec 06 15:36:02 crc kubenswrapper[5003]: I1206 15:36:02.338618 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-x2bhv"]
Dec 06 15:36:02 crc kubenswrapper[5003]: I1206 15:36:02.339416 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-x2bhv" podUID="a73c8333-dbdd-447b-b940-5aca2f15d00d" containerName="registry-server" containerID="cri-o://28312340308c6c2ad9ff05fda8438cab72ec2637a9e8248893c070f3d28ed811" gracePeriod=2
Dec 06 15:36:02 crc kubenswrapper[5003]: I1206 15:36:02.482340 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mbw9t"
Dec 06 15:36:02 crc kubenswrapper[5003]: I1206 15:36:02.482775 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-mbw9t"
Dec 06 15:36:02 crc kubenswrapper[5003]: I1206 15:36:02.550007 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-mbw9t"
Dec 06 15:36:02 crc kubenswrapper[5003]: I1206 15:36:02.937848 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-x4xqj"]
Dec 06 15:36:02 crc kubenswrapper[5003]: I1206 15:36:02.938079 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-x4xqj" podUID="78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4" containerName="registry-server" containerID="cri-o://ef958b7c591597101d3db38b2c6ffd23bbc2b747c581e47af384853dea5b0c20" gracePeriod=2
Dec 06 15:36:02 crc kubenswrapper[5003]: I1206 15:36:02.940851 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-gwpxn"
Dec 06 15:36:02 crc kubenswrapper[5003]: I1206 15:36:02.941383 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-gwpxn"
Dec 06 15:36:02 crc kubenswrapper[5003]: I1206 15:36:02.986260 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-gwpxn"
Dec 06 15:36:03 crc kubenswrapper[5003]: I1206 15:36:03.149029 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-gwpxn"
Dec 06 15:36:03 crc kubenswrapper[5003]: I1206 15:36:03.152836 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mbw9t"
Dec 06 15:36:04 crc kubenswrapper[5003]: I1206 15:36:04.737400 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pzkrw"]
Dec 06 15:36:04 crc kubenswrapper[5003]: I1206 15:36:04.737888 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-pzkrw" podUID="b7b85911-8f34-416f-b3f9-2776b2aa7876" containerName="registry-server" containerID="cri-o://cee0fa2fc0e2e8e2da2c83ec1a8892ba4b24e6caa9e9d00f2efa10d023de603f" gracePeriod=2
Dec 06 15:36:05 crc kubenswrapper[5003]: I1206 15:36:05.120927 5003 generic.go:334] "Generic (PLEG): container finished" podID="b7b85911-8f34-416f-b3f9-2776b2aa7876" containerID="cee0fa2fc0e2e8e2da2c83ec1a8892ba4b24e6caa9e9d00f2efa10d023de603f" exitCode=0
Dec 06 15:36:05 crc kubenswrapper[5003]: I1206 15:36:05.121208 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pzkrw" event={"ID":"b7b85911-8f34-416f-b3f9-2776b2aa7876","Type":"ContainerDied","Data":"cee0fa2fc0e2e8e2da2c83ec1a8892ba4b24e6caa9e9d00f2efa10d023de603f"}
Dec 06 15:36:05 crc kubenswrapper[5003]: I1206 15:36:05.125770 5003 generic.go:334] "Generic (PLEG): container finished" podID="a73c8333-dbdd-447b-b940-5aca2f15d00d" containerID="28312340308c6c2ad9ff05fda8438cab72ec2637a9e8248893c070f3d28ed811" exitCode=0
Dec 06 15:36:05 crc kubenswrapper[5003]: I1206 15:36:05.125816 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x2bhv" event={"ID":"a73c8333-dbdd-447b-b940-5aca2f15d00d","Type":"ContainerDied","Data":"28312340308c6c2ad9ff05fda8438cab72ec2637a9e8248893c070f3d28ed811"}
Dec 06 15:36:05 crc kubenswrapper[5003]: I1206 15:36:05.127734 5003 generic.go:334] "Generic (PLEG): container finished" podID="78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4" containerID="ef958b7c591597101d3db38b2c6ffd23bbc2b747c581e47af384853dea5b0c20" exitCode=0
Dec 06 15:36:05 crc kubenswrapper[5003]: I1206 15:36:05.128310 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x4xqj" event={"ID":"78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4","Type":"ContainerDied","Data":"ef958b7c591597101d3db38b2c6ffd23bbc2b747c581e47af384853dea5b0c20"}
Dec 06 15:36:05 crc kubenswrapper[5003]: I1206 15:36:05.171238 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-x2bhv"
Dec 06 15:36:05 crc kubenswrapper[5003]: I1206 15:36:05.212177 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x4xqj"
Dec 06 15:36:05 crc kubenswrapper[5003]: I1206 15:36:05.299916 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a73c8333-dbdd-447b-b940-5aca2f15d00d-utilities\") pod \"a73c8333-dbdd-447b-b940-5aca2f15d00d\" (UID: \"a73c8333-dbdd-447b-b940-5aca2f15d00d\") "
Dec 06 15:36:05 crc kubenswrapper[5003]: I1206 15:36:05.299977 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a73c8333-dbdd-447b-b940-5aca2f15d00d-catalog-content\") pod \"a73c8333-dbdd-447b-b940-5aca2f15d00d\" (UID: \"a73c8333-dbdd-447b-b940-5aca2f15d00d\") "
Dec 06 15:36:05 crc kubenswrapper[5003]: I1206 15:36:05.300024 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tsk7n\" (UniqueName: \"kubernetes.io/projected/a73c8333-dbdd-447b-b940-5aca2f15d00d-kube-api-access-tsk7n\") pod \"a73c8333-dbdd-447b-b940-5aca2f15d00d\" (UID: \"a73c8333-dbdd-447b-b940-5aca2f15d00d\") "
Dec 06 15:36:05 crc kubenswrapper[5003]: I1206 15:36:05.301336 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a73c8333-dbdd-447b-b940-5aca2f15d00d-utilities" (OuterVolumeSpecName: "utilities") pod "a73c8333-dbdd-447b-b940-5aca2f15d00d" (UID: "a73c8333-dbdd-447b-b940-5aca2f15d00d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 06 15:36:05 crc kubenswrapper[5003]: I1206 15:36:05.306367 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a73c8333-dbdd-447b-b940-5aca2f15d00d-kube-api-access-tsk7n" (OuterVolumeSpecName: "kube-api-access-tsk7n") pod "a73c8333-dbdd-447b-b940-5aca2f15d00d" (UID: "a73c8333-dbdd-447b-b940-5aca2f15d00d"). InnerVolumeSpecName "kube-api-access-tsk7n". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 06 15:36:05 crc kubenswrapper[5003]: I1206 15:36:05.358598 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a73c8333-dbdd-447b-b940-5aca2f15d00d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a73c8333-dbdd-447b-b940-5aca2f15d00d" (UID: "a73c8333-dbdd-447b-b940-5aca2f15d00d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 06 15:36:05 crc kubenswrapper[5003]: I1206 15:36:05.401240 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d2scn\" (UniqueName: \"kubernetes.io/projected/78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4-kube-api-access-d2scn\") pod \"78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4\" (UID: \"78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4\") "
Dec 06 15:36:05 crc kubenswrapper[5003]: I1206 15:36:05.401361 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4-catalog-content\") pod \"78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4\" (UID: \"78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4\") "
Dec 06 15:36:05 crc kubenswrapper[5003]: I1206 15:36:05.401400 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4-utilities\") pod \"78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4\" (UID: \"78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4\") "
Dec 06 15:36:05 crc kubenswrapper[5003]: I1206 15:36:05.401664 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a73c8333-dbdd-447b-b940-5aca2f15d00d-utilities\") on node \"crc\" DevicePath \"\""
Dec 06 15:36:05 crc kubenswrapper[5003]: I1206 15:36:05.401679 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a73c8333-dbdd-447b-b940-5aca2f15d00d-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 06 15:36:05 crc kubenswrapper[5003]: I1206 15:36:05.401688 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tsk7n\" (UniqueName: \"kubernetes.io/projected/a73c8333-dbdd-447b-b940-5aca2f15d00d-kube-api-access-tsk7n\") on node \"crc\" DevicePath \"\""
Dec 06 15:36:05 crc kubenswrapper[5003]: I1206 15:36:05.402289 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4-utilities" (OuterVolumeSpecName: "utilities") pod "78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4" (UID: "78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 06 15:36:05 crc kubenswrapper[5003]: I1206 15:36:05.405363 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4-kube-api-access-d2scn" (OuterVolumeSpecName: "kube-api-access-d2scn") pod "78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4" (UID: "78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4"). InnerVolumeSpecName "kube-api-access-d2scn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 06 15:36:05 crc kubenswrapper[5003]: I1206 15:36:05.454538 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4" (UID: "78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 06 15:36:05 crc kubenswrapper[5003]: I1206 15:36:05.503322 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 06 15:36:05 crc kubenswrapper[5003]: I1206 15:36:05.503376 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4-utilities\") on node \"crc\" DevicePath \"\""
Dec 06 15:36:05 crc kubenswrapper[5003]: I1206 15:36:05.503388 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d2scn\" (UniqueName: \"kubernetes.io/projected/78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4-kube-api-access-d2scn\") on node \"crc\" DevicePath \"\""
Dec 06 15:36:05 crc kubenswrapper[5003]: I1206 15:36:05.571959 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pzkrw"
Dec 06 15:36:05 crc kubenswrapper[5003]: I1206 15:36:05.705880 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mq5bv\" (UniqueName: \"kubernetes.io/projected/b7b85911-8f34-416f-b3f9-2776b2aa7876-kube-api-access-mq5bv\") pod \"b7b85911-8f34-416f-b3f9-2776b2aa7876\" (UID: \"b7b85911-8f34-416f-b3f9-2776b2aa7876\") "
Dec 06 15:36:05 crc kubenswrapper[5003]: I1206 15:36:05.706260 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7b85911-8f34-416f-b3f9-2776b2aa7876-catalog-content\") pod \"b7b85911-8f34-416f-b3f9-2776b2aa7876\" (UID: \"b7b85911-8f34-416f-b3f9-2776b2aa7876\") "
Dec 06 15:36:05 crc kubenswrapper[5003]: I1206 15:36:05.706336 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7b85911-8f34-416f-b3f9-2776b2aa7876-utilities\") pod \"b7b85911-8f34-416f-b3f9-2776b2aa7876\" (UID: \"b7b85911-8f34-416f-b3f9-2776b2aa7876\") "
Dec 06 15:36:05 crc kubenswrapper[5003]: I1206 15:36:05.707145 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b7b85911-8f34-416f-b3f9-2776b2aa7876-utilities" (OuterVolumeSpecName: "utilities") pod "b7b85911-8f34-416f-b3f9-2776b2aa7876" (UID: "b7b85911-8f34-416f-b3f9-2776b2aa7876"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 06 15:36:05 crc kubenswrapper[5003]: I1206 15:36:05.708778 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7b85911-8f34-416f-b3f9-2776b2aa7876-kube-api-access-mq5bv" (OuterVolumeSpecName: "kube-api-access-mq5bv") pod "b7b85911-8f34-416f-b3f9-2776b2aa7876" (UID: "b7b85911-8f34-416f-b3f9-2776b2aa7876"). InnerVolumeSpecName "kube-api-access-mq5bv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 06 15:36:05 crc kubenswrapper[5003]: I1206 15:36:05.728051 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b7b85911-8f34-416f-b3f9-2776b2aa7876-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b7b85911-8f34-416f-b3f9-2776b2aa7876" (UID: "b7b85911-8f34-416f-b3f9-2776b2aa7876"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 06 15:36:05 crc kubenswrapper[5003]: I1206 15:36:05.807990 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mq5bv\" (UniqueName: \"kubernetes.io/projected/b7b85911-8f34-416f-b3f9-2776b2aa7876-kube-api-access-mq5bv\") on node \"crc\" DevicePath \"\""
Dec 06 15:36:05 crc kubenswrapper[5003]: I1206 15:36:05.808040 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7b85911-8f34-416f-b3f9-2776b2aa7876-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 06 15:36:05 crc kubenswrapper[5003]: I1206 15:36:05.808053 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7b85911-8f34-416f-b3f9-2776b2aa7876-utilities\") on node \"crc\" DevicePath \"\""
Dec 06 15:36:06 crc kubenswrapper[5003]: I1206 15:36:06.135833 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pzkrw"
Dec 06 15:36:06 crc kubenswrapper[5003]: I1206 15:36:06.135828 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pzkrw" event={"ID":"b7b85911-8f34-416f-b3f9-2776b2aa7876","Type":"ContainerDied","Data":"af743930f7f20344700da3a8be2d8efbc6ed7095a6c88c3923bd7817738db6bd"}
Dec 06 15:36:06 crc kubenswrapper[5003]: I1206 15:36:06.136019 5003 scope.go:117] "RemoveContainer" containerID="cee0fa2fc0e2e8e2da2c83ec1a8892ba4b24e6caa9e9d00f2efa10d023de603f"
Dec 06 15:36:06 crc kubenswrapper[5003]: I1206 15:36:06.138325 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x2bhv" event={"ID":"a73c8333-dbdd-447b-b940-5aca2f15d00d","Type":"ContainerDied","Data":"5def1bc74296a7755fffd1e04535358f85fcf146a68c9e762a4a1aa1ad5b4bc4"}
Dec 06 15:36:06 crc kubenswrapper[5003]: I1206 15:36:06.138363 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-x2bhv"
Dec 06 15:36:06 crc kubenswrapper[5003]: I1206 15:36:06.141884 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x4xqj" event={"ID":"78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4","Type":"ContainerDied","Data":"567130f65a9cacd76dbcdafef3fdbe66ae7108e28795c8f4d63cef1665b499e8"}
Dec 06 15:36:06 crc kubenswrapper[5003]: I1206 15:36:06.141985 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x4xqj"
Dec 06 15:36:06 crc kubenswrapper[5003]: I1206 15:36:06.160559 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-x2bhv"]
Dec 06 15:36:06 crc kubenswrapper[5003]: I1206 15:36:06.165136 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-x2bhv"]
Dec 06 15:36:06 crc kubenswrapper[5003]: I1206 15:36:06.165937 5003 scope.go:117] "RemoveContainer" containerID="294267902fc5c1a281b441b90ad4ff374c9feeb95876c885957e4ae654807eb7"
Dec 06 15:36:06 crc kubenswrapper[5003]: I1206 15:36:06.176945 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-x4xqj"]
Dec 06 15:36:06 crc kubenswrapper[5003]: I1206 15:36:06.185166 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-x4xqj"]
Dec 06 15:36:06 crc kubenswrapper[5003]: I1206 15:36:06.188158 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pzkrw"]
Dec 06 15:36:06 crc kubenswrapper[5003]: I1206 15:36:06.191538 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-pzkrw"]
Dec 06 15:36:06 crc kubenswrapper[5003]: I1206 15:36:06.192750 5003 scope.go:117] "RemoveContainer" containerID="1863ec88537c134f63306a8c8736febc2272e940da26a2eae6588201b34f5e67"
Dec 06 15:36:06 crc kubenswrapper[5003]: I1206 15:36:06.205647 5003 scope.go:117] "RemoveContainer" containerID="28312340308c6c2ad9ff05fda8438cab72ec2637a9e8248893c070f3d28ed811"
Dec 06 15:36:06 crc kubenswrapper[5003]: I1206 15:36:06.223051 5003 scope.go:117] "RemoveContainer" containerID="e8282c0d23957b5f2c04699a19f54f397f94a6301ae942de9cbc4319a4bfbd9f"
Dec 06 15:36:06 crc kubenswrapper[5003]: I1206 15:36:06.240194 5003 scope.go:117] "RemoveContainer" containerID="981a6630aeed8abcfc1d9811f705234870ede08d7b9cc9e59219e3d8c339eacf"
Dec 06 15:36:06 crc kubenswrapper[5003]: I1206 15:36:06.253949 5003 scope.go:117] "RemoveContainer" containerID="ef958b7c591597101d3db38b2c6ffd23bbc2b747c581e47af384853dea5b0c20"
Dec 06 15:36:06 crc kubenswrapper[5003]: I1206 15:36:06.268119 5003 scope.go:117] "RemoveContainer" containerID="aafd5ddef69565d6ba10d6c654d0969bcc3e7d91226b4e25b63cfee57341700b"
Dec 06 15:36:06 crc kubenswrapper[5003]: I1206 15:36:06.281965 5003 scope.go:117] "RemoveContainer" containerID="b9995232ce54c673c8a974886149cdf14f22fe73fa727a0a2b165c84d7b2c2ac"
Dec 06 15:36:07 crc kubenswrapper[5003]: I1206 15:36:07.139982 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gwpxn"]
Dec 06 15:36:07 crc kubenswrapper[5003]: I1206 15:36:07.140180 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-gwpxn" podUID="158c5179-aee8-4e5b-8a39-38d19808b3fd" containerName="registry-server" containerID="cri-o://0c74c29d9df46fb3d47a5b5adb74059b162b5cff305b3b5845b3876bd7ae87f0" gracePeriod=2
Dec 06 15:36:07 crc kubenswrapper[5003]: I1206 15:36:07.726705 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4" path="/var/lib/kubelet/pods/78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4/volumes"
Dec 06 15:36:07 crc kubenswrapper[5003]: I1206 15:36:07.728642 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a73c8333-dbdd-447b-b940-5aca2f15d00d" path="/var/lib/kubelet/pods/a73c8333-dbdd-447b-b940-5aca2f15d00d/volumes"
Dec 06 15:36:07 crc kubenswrapper[5003]: I1206 15:36:07.729889 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7b85911-8f34-416f-b3f9-2776b2aa7876" path="/var/lib/kubelet/pods/b7b85911-8f34-416f-b3f9-2776b2aa7876/volumes"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.145595 5003 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 06 15:36:08 crc kubenswrapper[5003]: E1206 15:36:08.145795 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a73c8333-dbdd-447b-b940-5aca2f15d00d" containerName="extract-utilities"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.145807 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="a73c8333-dbdd-447b-b940-5aca2f15d00d" containerName="extract-utilities"
Dec 06 15:36:08 crc kubenswrapper[5003]: E1206 15:36:08.145820 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7b85911-8f34-416f-b3f9-2776b2aa7876" containerName="registry-server"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.145826 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7b85911-8f34-416f-b3f9-2776b2aa7876" containerName="registry-server"
Dec 06 15:36:08 crc kubenswrapper[5003]: E1206 15:36:08.145833 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4" containerName="extract-utilities"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.145838 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4" containerName="extract-utilities"
Dec 06 15:36:08 crc kubenswrapper[5003]: E1206 15:36:08.145844 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4" containerName="registry-server"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.145849 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4" containerName="registry-server"
Dec 06 15:36:08 crc kubenswrapper[5003]: E1206 15:36:08.145859 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a73c8333-dbdd-447b-b940-5aca2f15d00d" containerName="registry-server"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.145864 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="a73c8333-dbdd-447b-b940-5aca2f15d00d" containerName="registry-server"
Dec 06 15:36:08 crc kubenswrapper[5003]: E1206 15:36:08.145874 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a73c8333-dbdd-447b-b940-5aca2f15d00d" containerName="extract-content"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.145880 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="a73c8333-dbdd-447b-b940-5aca2f15d00d" containerName="extract-content"
Dec 06 15:36:08 crc kubenswrapper[5003]: E1206 15:36:08.145886 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21cd71a4-5614-4f8c-b9fd-a1ab2b21c363" containerName="pruner"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.145891 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="21cd71a4-5614-4f8c-b9fd-a1ab2b21c363" containerName="pruner"
Dec 06 15:36:08 crc kubenswrapper[5003]: E1206 15:36:08.145901 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4" containerName="extract-content"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.145907 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4" containerName="extract-content"
Dec 06 15:36:08 crc kubenswrapper[5003]: E1206 15:36:08.145913 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7b85911-8f34-416f-b3f9-2776b2aa7876" containerName="extract-content"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.145919 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7b85911-8f34-416f-b3f9-2776b2aa7876" containerName="extract-content"
Dec 06 15:36:08 crc kubenswrapper[5003]: E1206 15:36:08.145928 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7b85911-8f34-416f-b3f9-2776b2aa7876" containerName="extract-utilities"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.145933 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7b85911-8f34-416f-b3f9-2776b2aa7876" containerName="extract-utilities"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.146042 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="78c47ecf-a4ad-4ac0-85f9-66fa3c8651b4" containerName="registry-server"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.146052 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="a73c8333-dbdd-447b-b940-5aca2f15d00d" containerName="registry-server"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.146063 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="21cd71a4-5614-4f8c-b9fd-a1ab2b21c363" containerName="pruner"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.146071 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7b85911-8f34-416f-b3f9-2776b2aa7876" containerName="registry-server"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.146406 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.147381 5003 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.147519 5003 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Dec 06 15:36:08 crc kubenswrapper[5003]: E1206 15:36:08.147831 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.148097 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 06 15:36:08 crc kubenswrapper[5003]: E1206 15:36:08.148160 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.148221 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Dec 06 15:36:08 crc kubenswrapper[5003]: E1206 15:36:08.148278 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.148352 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup"
Dec 06 15:36:08 crc kubenswrapper[5003]: E1206 15:36:08.148411 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.148472 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Dec 06 15:36:08 crc kubenswrapper[5003]: E1206 15:36:08.148552 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.148616 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Dec 06 15:36:08 crc kubenswrapper[5003]: E1206 15:36:08.148683 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.148745 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.148828 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c" gracePeriod=15
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.149106 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5" gracePeriod=15
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.149148 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5" gracePeriod=15
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.149241 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76" gracePeriod=15
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.149648 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.149746 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.149800 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.151117 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.151225 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.151311 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.150983 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0" gracePeriod=15
Dec 06 15:36:08 crc kubenswrapper[5003]: E1206 15:36:08.151634 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.151702 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.179891 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.260266 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.260317 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.260341 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.260357 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.260379 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.260398 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.260419 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.260454 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.361237 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.361358 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.361409 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.361452 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.361526 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.361521 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.361595 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.361562 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.361625 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.361645 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.361521 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.361565 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.361810 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.361880 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.361985 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.362098 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 06 15:36:08 crc kubenswrapper[5003]: I1206 15:36:08.477225 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 06 15:36:08 crc kubenswrapper[5003]: W1206 15:36:08.500670 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-9f8fcda229a9c53c0842a31ac8887839e37237f963484a3a62fcc3c90439b004 WatchSource:0}: Error finding container 9f8fcda229a9c53c0842a31ac8887839e37237f963484a3a62fcc3c90439b004: Status 404 returned error can't find the container with id 9f8fcda229a9c53c0842a31ac8887839e37237f963484a3a62fcc3c90439b004
Dec 06 15:36:08 crc kubenswrapper[5003]: E1206 15:36:08.893369 5003 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.73:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187eaa5102d89bae openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Created,Message:Created container startup-monitor,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-06 15:36:08.892636078 +0000 UTC m=+247.425990459,LastTimestamp:2025-12-06 15:36:08.892636078 +0000 UTC m=+247.425990459,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 06 15:36:09 crc kubenswrapper[5003]: I1206 15:36:09.166086 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"a2b6b0947f32c0a44ee1fb4927ff98d8f411909f06c4abcf03fcbd5b70180311"}
Dec 06 15:36:09 crc kubenswrapper[5003]: I1206 15:36:09.166144 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"9f8fcda229a9c53c0842a31ac8887839e37237f963484a3a62fcc3c90439b004"}
Dec 06 15:36:09 crc kubenswrapper[5003]: I1206 15:36:09.167106 5003 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.73:6443: connect: connection refused"
Dec 06 15:36:09 crc kubenswrapper[5003]: I1206 15:36:09.167668 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.73:6443: connect: connection refused"
Dec 06 15:36:09 crc kubenswrapper[5003]: I1206 15:36:09.168512 5003 generic.go:334] "Generic (PLEG): container finished" podID="7a65c8f8-e5cf-471d-bbf1-2c541164cdf8" containerID="a7ec154a4d732cd6f0371e157fa8ff6aed0c56803b19e90d35a0257a34967e5e" exitCode=0
Dec 06 15:36:09 crc kubenswrapper[5003]: I1206 15:36:09.168542 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod"
pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"7a65c8f8-e5cf-471d-bbf1-2c541164cdf8","Type":"ContainerDied","Data":"a7ec154a4d732cd6f0371e157fa8ff6aed0c56803b19e90d35a0257a34967e5e"} Dec 06 15:36:09 crc kubenswrapper[5003]: I1206 15:36:09.169074 5003 status_manager.go:851] "Failed to get status for pod" podUID="7a65c8f8-e5cf-471d-bbf1-2c541164cdf8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:09 crc kubenswrapper[5003]: I1206 15:36:09.169370 5003 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:09 crc kubenswrapper[5003]: I1206 15:36:09.169679 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:09 crc kubenswrapper[5003]: I1206 15:36:09.170834 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 06 15:36:09 crc kubenswrapper[5003]: I1206 15:36:09.171956 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 06 15:36:09 crc kubenswrapper[5003]: I1206 15:36:09.172634 5003 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5" exitCode=0 Dec 06 15:36:09 crc kubenswrapper[5003]: I1206 15:36:09.172655 5003 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76" exitCode=0 Dec 06 15:36:09 crc kubenswrapper[5003]: I1206 15:36:09.172664 5003 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0" exitCode=0 Dec 06 15:36:09 crc kubenswrapper[5003]: I1206 15:36:09.172672 5003 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5" exitCode=2 Dec 06 15:36:09 crc kubenswrapper[5003]: I1206 15:36:09.172728 5003 scope.go:117] "RemoveContainer" containerID="efc65bc9bb60202069cb51eed80fb62e527399c4189554e791bab3e23993c95c" Dec 06 15:36:09 crc kubenswrapper[5003]: I1206 15:36:09.175318 5003 generic.go:334] "Generic (PLEG): container finished" podID="158c5179-aee8-4e5b-8a39-38d19808b3fd" containerID="0c74c29d9df46fb3d47a5b5adb74059b162b5cff305b3b5845b3876bd7ae87f0" exitCode=0 Dec 06 15:36:09 crc kubenswrapper[5003]: I1206 15:36:09.175338 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gwpxn" 
event={"ID":"158c5179-aee8-4e5b-8a39-38d19808b3fd","Type":"ContainerDied","Data":"0c74c29d9df46fb3d47a5b5adb74059b162b5cff305b3b5845b3876bd7ae87f0"} Dec 06 15:36:09 crc kubenswrapper[5003]: E1206 15:36:09.720663 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:36:09Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:36:09Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:36:09Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:36:09Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:15adb3b2133604b064893f8009a74145e4c8bb5b134d111346dcccbdd2aa9bc2\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:164fc35a19aa6cc886c8015c8ee3eba4895e76b1152cb9d795e4f3154a8533a3\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1610512706},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:544a01170a4aa6cf8322d5bffa5817113efd696e3c3e9bac6a29d2da9f9451e5\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:67f42a86b99b69b357285a6845977f967e6c825de2049c19620a78eaf99cebf3\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1222075732},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:3523faa7434bcaef341b7154f6919f25a1fcaa583614c94f5051332024556dd6\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:f8c8ed2e117f0e59ac547d4dea01ecc0c909006cccfbd646461171ffa7a8fb31\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1201849672},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1b1026c62413fa239fa4ff6541fe8bda656c1281867ad6ee2c848feccb13c97e\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:2b633ebdc901d19290af4dc2d09e2b59c504c0fc15a3fba410b0ce098e2d5753\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1141987142},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94
ca1da210f3980f66d77f91ba201c875\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/opensh
ift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792}]}}\" for node \"crc\": Patch 
\"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:09 crc kubenswrapper[5003]: E1206 15:36:09.721254 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:09 crc kubenswrapper[5003]: E1206 15:36:09.721650 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:09 crc kubenswrapper[5003]: E1206 15:36:09.722006 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:09 crc kubenswrapper[5003]: E1206 15:36:09.722415 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:09 crc kubenswrapper[5003]: E1206 15:36:09.722448 5003 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 06 15:36:10 crc kubenswrapper[5003]: I1206 15:36:10.240280 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gwpxn" Dec 06 15:36:10 crc kubenswrapper[5003]: I1206 15:36:10.241502 5003 status_manager.go:851] "Failed to get status for pod" podUID="158c5179-aee8-4e5b-8a39-38d19808b3fd" pod="openshift-marketplace/redhat-operators-gwpxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gwpxn\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:10 crc kubenswrapper[5003]: I1206 15:36:10.241995 5003 status_manager.go:851] "Failed to get status for pod" podUID="7a65c8f8-e5cf-471d-bbf1-2c541164cdf8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:10 crc kubenswrapper[5003]: I1206 15:36:10.242532 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:10 crc kubenswrapper[5003]: I1206 15:36:10.286445 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/158c5179-aee8-4e5b-8a39-38d19808b3fd-catalog-content\") pod \"158c5179-aee8-4e5b-8a39-38d19808b3fd\" (UID: \"158c5179-aee8-4e5b-8a39-38d19808b3fd\") " Dec 06 15:36:10 crc kubenswrapper[5003]: I1206 15:36:10.286650 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-746fx\" (UniqueName: \"kubernetes.io/projected/158c5179-aee8-4e5b-8a39-38d19808b3fd-kube-api-access-746fx\") pod 
\"158c5179-aee8-4e5b-8a39-38d19808b3fd\" (UID: \"158c5179-aee8-4e5b-8a39-38d19808b3fd\") " Dec 06 15:36:10 crc kubenswrapper[5003]: I1206 15:36:10.286745 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/158c5179-aee8-4e5b-8a39-38d19808b3fd-utilities\") pod \"158c5179-aee8-4e5b-8a39-38d19808b3fd\" (UID: \"158c5179-aee8-4e5b-8a39-38d19808b3fd\") " Dec 06 15:36:10 crc kubenswrapper[5003]: I1206 15:36:10.289142 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/158c5179-aee8-4e5b-8a39-38d19808b3fd-utilities" (OuterVolumeSpecName: "utilities") pod "158c5179-aee8-4e5b-8a39-38d19808b3fd" (UID: "158c5179-aee8-4e5b-8a39-38d19808b3fd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:36:10 crc kubenswrapper[5003]: I1206 15:36:10.297309 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/158c5179-aee8-4e5b-8a39-38d19808b3fd-kube-api-access-746fx" (OuterVolumeSpecName: "kube-api-access-746fx") pod "158c5179-aee8-4e5b-8a39-38d19808b3fd" (UID: "158c5179-aee8-4e5b-8a39-38d19808b3fd"). InnerVolumeSpecName "kube-api-access-746fx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:36:10 crc kubenswrapper[5003]: I1206 15:36:10.389021 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-746fx\" (UniqueName: \"kubernetes.io/projected/158c5179-aee8-4e5b-8a39-38d19808b3fd-kube-api-access-746fx\") on node \"crc\" DevicePath \"\"" Dec 06 15:36:10 crc kubenswrapper[5003]: I1206 15:36:10.389058 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/158c5179-aee8-4e5b-8a39-38d19808b3fd-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 15:36:10 crc kubenswrapper[5003]: I1206 15:36:10.474519 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 06 15:36:10 crc kubenswrapper[5003]: I1206 15:36:10.475330 5003 status_manager.go:851] "Failed to get status for pod" podUID="158c5179-aee8-4e5b-8a39-38d19808b3fd" pod="openshift-marketplace/redhat-operators-gwpxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gwpxn\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:10 crc kubenswrapper[5003]: I1206 15:36:10.475778 5003 status_manager.go:851] "Failed to get status for pod" podUID="7a65c8f8-e5cf-471d-bbf1-2c541164cdf8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:10 crc kubenswrapper[5003]: I1206 15:36:10.476092 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:10 crc kubenswrapper[5003]: I1206 15:36:10.590737 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7a65c8f8-e5cf-471d-bbf1-2c541164cdf8-kubelet-dir\") pod \"7a65c8f8-e5cf-471d-bbf1-2c541164cdf8\" (UID: \"7a65c8f8-e5cf-471d-bbf1-2c541164cdf8\") " Dec 06 15:36:10 crc kubenswrapper[5003]: I1206 15:36:10.590855 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/7a65c8f8-e5cf-471d-bbf1-2c541164cdf8-var-lock\") pod \"7a65c8f8-e5cf-471d-bbf1-2c541164cdf8\" (UID: \"7a65c8f8-e5cf-471d-bbf1-2c541164cdf8\") " Dec 06 15:36:10 crc kubenswrapper[5003]: I1206 15:36:10.590882 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7a65c8f8-e5cf-471d-bbf1-2c541164cdf8-kube-api-access\") pod \"7a65c8f8-e5cf-471d-bbf1-2c541164cdf8\" (UID: \"7a65c8f8-e5cf-471d-bbf1-2c541164cdf8\") " Dec 06 15:36:10 crc kubenswrapper[5003]: I1206 15:36:10.590871 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7a65c8f8-e5cf-471d-bbf1-2c541164cdf8-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "7a65c8f8-e5cf-471d-bbf1-2c541164cdf8" (UID: "7a65c8f8-e5cf-471d-bbf1-2c541164cdf8"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 15:36:10 crc kubenswrapper[5003]: I1206 15:36:10.590942 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7a65c8f8-e5cf-471d-bbf1-2c541164cdf8-var-lock" (OuterVolumeSpecName: "var-lock") pod "7a65c8f8-e5cf-471d-bbf1-2c541164cdf8" (UID: "7a65c8f8-e5cf-471d-bbf1-2c541164cdf8"). InnerVolumeSpecName "var-lock". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 15:36:10 crc kubenswrapper[5003]: I1206 15:36:10.591103 5003 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7a65c8f8-e5cf-471d-bbf1-2c541164cdf8-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 06 15:36:10 crc kubenswrapper[5003]: I1206 15:36:10.591140 5003 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/7a65c8f8-e5cf-471d-bbf1-2c541164cdf8-var-lock\") on node \"crc\" DevicePath \"\"" Dec 06 15:36:10 crc kubenswrapper[5003]: I1206 15:36:10.594604 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a65c8f8-e5cf-471d-bbf1-2c541164cdf8-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "7a65c8f8-e5cf-471d-bbf1-2c541164cdf8" (UID: "7a65c8f8-e5cf-471d-bbf1-2c541164cdf8"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:36:10 crc kubenswrapper[5003]: I1206 15:36:10.697301 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7a65c8f8-e5cf-471d-bbf1-2c541164cdf8-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 06 15:36:10 crc kubenswrapper[5003]: I1206 15:36:10.757461 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/158c5179-aee8-4e5b-8a39-38d19808b3fd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "158c5179-aee8-4e5b-8a39-38d19808b3fd" (UID: "158c5179-aee8-4e5b-8a39-38d19808b3fd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:36:10 crc kubenswrapper[5003]: I1206 15:36:10.798290 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/158c5179-aee8-4e5b-8a39-38d19808b3fd-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 15:36:11 crc kubenswrapper[5003]: E1206 15:36:11.155689 5003 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:11 crc kubenswrapper[5003]: E1206 15:36:11.156322 5003 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:11 crc kubenswrapper[5003]: E1206 15:36:11.156906 5003 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:11 crc kubenswrapper[5003]: E1206 15:36:11.157321 5003 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:11 crc kubenswrapper[5003]: E1206 15:36:11.157633 5003 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:11 crc kubenswrapper[5003]: I1206 15:36:11.157669 5003 
controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 06 15:36:11 crc kubenswrapper[5003]: E1206 15:36:11.157968 5003 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.73:6443: connect: connection refused" interval="200ms" Dec 06 15:36:11 crc kubenswrapper[5003]: I1206 15:36:11.210448 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gwpxn" event={"ID":"158c5179-aee8-4e5b-8a39-38d19808b3fd","Type":"ContainerDied","Data":"e79778a80554c3eb28479826ee0b744470c9f5722b516f95e8f11e12949cf9ad"} Dec 06 15:36:11 crc kubenswrapper[5003]: I1206 15:36:11.210739 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gwpxn" Dec 06 15:36:11 crc kubenswrapper[5003]: I1206 15:36:11.212189 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"7a65c8f8-e5cf-471d-bbf1-2c541164cdf8","Type":"ContainerDied","Data":"60b34747c08e62f951fb6f31f8923a5aedeedbd97bda433dac45235b8626b41a"} Dec 06 15:36:11 crc kubenswrapper[5003]: I1206 15:36:11.212216 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="60b34747c08e62f951fb6f31f8923a5aedeedbd97bda433dac45235b8626b41a" Dec 06 15:36:11 crc kubenswrapper[5003]: I1206 15:36:11.212274 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 06 15:36:11 crc kubenswrapper[5003]: I1206 15:36:11.212501 5003 status_manager.go:851] "Failed to get status for pod" podUID="7a65c8f8-e5cf-471d-bbf1-2c541164cdf8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:11 crc kubenswrapper[5003]: I1206 15:36:11.212694 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:11 crc kubenswrapper[5003]: I1206 15:36:11.212915 5003 status_manager.go:851] "Failed to get status for pod" podUID="158c5179-aee8-4e5b-8a39-38d19808b3fd" pod="openshift-marketplace/redhat-operators-gwpxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gwpxn\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:11 crc kubenswrapper[5003]: I1206 15:36:11.229690 5003 status_manager.go:851] "Failed to get status for pod" podUID="158c5179-aee8-4e5b-8a39-38d19808b3fd" pod="openshift-marketplace/redhat-operators-gwpxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gwpxn\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:11 crc kubenswrapper[5003]: I1206 15:36:11.229911 5003 status_manager.go:851] "Failed to get status for pod" podUID="7a65c8f8-e5cf-471d-bbf1-2c541164cdf8" pod="openshift-kube-apiserver/installer-9-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:11 crc kubenswrapper[5003]: I1206 15:36:11.230110 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:11 crc kubenswrapper[5003]: I1206 15:36:11.232720 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:11 crc kubenswrapper[5003]: I1206 15:36:11.232947 5003 status_manager.go:851] "Failed to get status for pod" podUID="158c5179-aee8-4e5b-8a39-38d19808b3fd" pod="openshift-marketplace/redhat-operators-gwpxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gwpxn\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:11 crc kubenswrapper[5003]: I1206 15:36:11.233154 5003 status_manager.go:851] "Failed to get status for pod" podUID="7a65c8f8-e5cf-471d-bbf1-2c541164cdf8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:11 crc kubenswrapper[5003]: E1206 15:36:11.358810 5003 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.73:6443: connect: connection refused" interval="400ms" Dec 06 15:36:11 crc kubenswrapper[5003]: I1206 15:36:11.714838 5003 status_manager.go:851] "Failed to get status for pod" podUID="158c5179-aee8-4e5b-8a39-38d19808b3fd" pod="openshift-marketplace/redhat-operators-gwpxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gwpxn\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:11 crc kubenswrapper[5003]: I1206 15:36:11.715377 5003 status_manager.go:851] "Failed to get status for pod" podUID="7a65c8f8-e5cf-471d-bbf1-2c541164cdf8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:11 crc kubenswrapper[5003]: I1206 15:36:11.716138 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:11 crc kubenswrapper[5003]: E1206 15:36:11.760199 5003 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 
38.102.83.73:6443: connect: connection refused" interval="800ms" Dec 06 15:36:11 crc kubenswrapper[5003]: E1206 15:36:11.786539 5003 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.73:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" volumeName="registry-storage" Dec 06 15:36:11 crc kubenswrapper[5003]: I1206 15:36:11.946584 5003 scope.go:117] "RemoveContainer" containerID="0c74c29d9df46fb3d47a5b5adb74059b162b5cff305b3b5845b3876bd7ae87f0" Dec 06 15:36:11 crc kubenswrapper[5003]: I1206 15:36:11.963445 5003 scope.go:117] "RemoveContainer" containerID="81056a0f7fdeec459dbb4a85d703e18881bd9542ae62284cf33f2b4f02ceecd5" Dec 06 15:36:11 crc kubenswrapper[5003]: I1206 15:36:11.986205 5003 scope.go:117] "RemoveContainer" containerID="4850de3c644d58103f4e9cce2a671751bccf2a03c53b53d46feba76883276880" Dec 06 15:36:12 crc kubenswrapper[5003]: I1206 15:36:12.229550 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 06 15:36:12 crc kubenswrapper[5003]: E1206 15:36:12.561112 5003 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.73:6443: connect: connection refused" interval="1.6s" Dec 06 15:36:13 crc kubenswrapper[5003]: I1206 15:36:13.243382 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 06 15:36:13 crc kubenswrapper[5003]: I1206 15:36:13.244277 5003 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c" exitCode=0 Dec 06 15:36:13 crc kubenswrapper[5003]: I1206 15:36:13.342313 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 06 15:36:13 crc kubenswrapper[5003]: I1206 15:36:13.343270 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 15:36:13 crc kubenswrapper[5003]: I1206 15:36:13.343851 5003 status_manager.go:851] "Failed to get status for pod" podUID="158c5179-aee8-4e5b-8a39-38d19808b3fd" pod="openshift-marketplace/redhat-operators-gwpxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gwpxn\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:13 crc kubenswrapper[5003]: I1206 15:36:13.344125 5003 status_manager.go:851] "Failed to get status for pod" podUID="7a65c8f8-e5cf-471d-bbf1-2c541164cdf8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:13 crc kubenswrapper[5003]: I1206 15:36:13.344400 5003 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:13 crc kubenswrapper[5003]: I1206 15:36:13.344704 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:13 crc kubenswrapper[5003]: I1206 15:36:13.442032 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 06 15:36:13 crc kubenswrapper[5003]: I1206 15:36:13.442116 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 06 15:36:13 crc kubenswrapper[5003]: I1206 15:36:13.442136 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 06 15:36:13 crc kubenswrapper[5003]: I1206 15:36:13.442258 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 15:36:13 crc kubenswrapper[5003]: I1206 15:36:13.442265 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 15:36:13 crc kubenswrapper[5003]: I1206 15:36:13.442369 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 15:36:13 crc kubenswrapper[5003]: I1206 15:36:13.442606 5003 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 06 15:36:13 crc kubenswrapper[5003]: I1206 15:36:13.442622 5003 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 06 15:36:13 crc kubenswrapper[5003]: I1206 15:36:13.442634 5003 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 06 15:36:13 crc kubenswrapper[5003]: I1206 15:36:13.720343 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 06 15:36:14 crc kubenswrapper[5003]: E1206 15:36:14.162516 5003 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.73:6443: connect: connection refused" interval="3.2s" Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.254828 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.256110 5003 scope.go:117] "RemoveContainer" containerID="ae2215e89d65c7bd69288cda74cdd83f0349f57b47f80ff1fa03d60a484558b5" Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.256236 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.257101 5003 status_manager.go:851] "Failed to get status for pod" podUID="158c5179-aee8-4e5b-8a39-38d19808b3fd" pod="openshift-marketplace/redhat-operators-gwpxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gwpxn\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.257447 5003 status_manager.go:851] "Failed to get status for pod" podUID="7a65c8f8-e5cf-471d-bbf1-2c541164cdf8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.257884 5003 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.258129 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.259916 5003 status_manager.go:851] "Failed to get status for pod" podUID="158c5179-aee8-4e5b-8a39-38d19808b3fd" pod="openshift-marketplace/redhat-operators-gwpxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gwpxn\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.260208 5003 status_manager.go:851] "Failed to get status for pod" podUID="7a65c8f8-e5cf-471d-bbf1-2c541164cdf8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.260553 5003 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.260843 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.282681 5003 scope.go:117] "RemoveContainer" containerID="b6013b3a95ef99138e6ea971b51a45f65923c09435a2595bba7af91eebb28d76" Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.297408 5003 scope.go:117] "RemoveContainer" 
containerID="c777cc5d07ff005aa8001da8a566484710f0253e4a6061e3297a884c6153a7d0" Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.316021 5003 scope.go:117] "RemoveContainer" containerID="9ac8291e7fa9a5ff379474802b6eae771542705aef4c443cc3123837d10dd9e5" Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.334206 5003 scope.go:117] "RemoveContainer" containerID="c9f683cdba22afa5d1d069cc32c6c83addf344c8b0ba3b39b7a2ca527408922c" Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.349921 5003 scope.go:117] "RemoveContainer" containerID="4256540a96bdcd018eb514cf70eea305513bb418afa89c8bfa5179c67822380e" Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.467108 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" podUID="0fc961e1-eee3-4fd5-ac99-56b85320740b" containerName="oauth-openshift" containerID="cri-o://fc154be70cccef24292ecbfe62be9985bf7018b340d737f07c6a0c02e9821407" gracePeriod=15 Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.882470 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.883366 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.884244 5003 status_manager.go:851] "Failed to get status for pod" podUID="158c5179-aee8-4e5b-8a39-38d19808b3fd" pod="openshift-marketplace/redhat-operators-gwpxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gwpxn\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.884608 5003 status_manager.go:851] "Failed to get status for pod" podUID="7a65c8f8-e5cf-471d-bbf1-2c541164cdf8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.884895 5003 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.885303 5003 status_manager.go:851] "Failed to get status for pod" podUID="0fc961e1-eee3-4fd5-ac99-56b85320740b" pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-tsg4h\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.961900 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2g4n6\" (UniqueName: \"kubernetes.io/projected/0fc961e1-eee3-4fd5-ac99-56b85320740b-kube-api-access-2g4n6\") pod \"0fc961e1-eee3-4fd5-ac99-56b85320740b\" (UID: 
\"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.961978 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-user-template-error\") pod \"0fc961e1-eee3-4fd5-ac99-56b85320740b\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.962015 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-user-template-provider-selection\") pod \"0fc961e1-eee3-4fd5-ac99-56b85320740b\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.962046 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-service-ca\") pod \"0fc961e1-eee3-4fd5-ac99-56b85320740b\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.962078 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-serving-cert\") pod \"0fc961e1-eee3-4fd5-ac99-56b85320740b\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.962109 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-session\") pod \"0fc961e1-eee3-4fd5-ac99-56b85320740b\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.962131 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-cliconfig\") pod \"0fc961e1-eee3-4fd5-ac99-56b85320740b\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.962164 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-ocp-branding-template\") pod \"0fc961e1-eee3-4fd5-ac99-56b85320740b\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.962202 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-router-certs\") pod \"0fc961e1-eee3-4fd5-ac99-56b85320740b\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.962247 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0fc961e1-eee3-4fd5-ac99-56b85320740b-audit-dir\") pod \"0fc961e1-eee3-4fd5-ac99-56b85320740b\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 
15:36:14.962284 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-user-idp-0-file-data\") pod \"0fc961e1-eee3-4fd5-ac99-56b85320740b\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.962327 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-user-template-login\") pod \"0fc961e1-eee3-4fd5-ac99-56b85320740b\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.962349 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0fc961e1-eee3-4fd5-ac99-56b85320740b-audit-policies\") pod \"0fc961e1-eee3-4fd5-ac99-56b85320740b\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.962375 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-trusted-ca-bundle\") pod \"0fc961e1-eee3-4fd5-ac99-56b85320740b\" (UID: \"0fc961e1-eee3-4fd5-ac99-56b85320740b\") " Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.963656 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "0fc961e1-eee3-4fd5-ac99-56b85320740b" (UID: "0fc961e1-eee3-4fd5-ac99-56b85320740b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.963699 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "0fc961e1-eee3-4fd5-ac99-56b85320740b" (UID: "0fc961e1-eee3-4fd5-ac99-56b85320740b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.963927 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0fc961e1-eee3-4fd5-ac99-56b85320740b-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "0fc961e1-eee3-4fd5-ac99-56b85320740b" (UID: "0fc961e1-eee3-4fd5-ac99-56b85320740b"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.964103 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "0fc961e1-eee3-4fd5-ac99-56b85320740b" (UID: "0fc961e1-eee3-4fd5-ac99-56b85320740b"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.964213 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0fc961e1-eee3-4fd5-ac99-56b85320740b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "0fc961e1-eee3-4fd5-ac99-56b85320740b" (UID: "0fc961e1-eee3-4fd5-ac99-56b85320740b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.969174 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "0fc961e1-eee3-4fd5-ac99-56b85320740b" (UID: "0fc961e1-eee3-4fd5-ac99-56b85320740b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.969420 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "0fc961e1-eee3-4fd5-ac99-56b85320740b" (UID: "0fc961e1-eee3-4fd5-ac99-56b85320740b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.969960 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "0fc961e1-eee3-4fd5-ac99-56b85320740b" (UID: "0fc961e1-eee3-4fd5-ac99-56b85320740b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.969996 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "0fc961e1-eee3-4fd5-ac99-56b85320740b" (UID: "0fc961e1-eee3-4fd5-ac99-56b85320740b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.970240 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0fc961e1-eee3-4fd5-ac99-56b85320740b-kube-api-access-2g4n6" (OuterVolumeSpecName: "kube-api-access-2g4n6") pod "0fc961e1-eee3-4fd5-ac99-56b85320740b" (UID: "0fc961e1-eee3-4fd5-ac99-56b85320740b"). InnerVolumeSpecName "kube-api-access-2g4n6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.970471 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "0fc961e1-eee3-4fd5-ac99-56b85320740b" (UID: "0fc961e1-eee3-4fd5-ac99-56b85320740b"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.972748 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "0fc961e1-eee3-4fd5-ac99-56b85320740b" (UID: "0fc961e1-eee3-4fd5-ac99-56b85320740b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.974857 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "0fc961e1-eee3-4fd5-ac99-56b85320740b" (UID: "0fc961e1-eee3-4fd5-ac99-56b85320740b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:36:14 crc kubenswrapper[5003]: I1206 15:36:14.981805 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "0fc961e1-eee3-4fd5-ac99-56b85320740b" (UID: "0fc961e1-eee3-4fd5-ac99-56b85320740b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:36:15 crc kubenswrapper[5003]: I1206 15:36:15.063964 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 06 15:36:15 crc kubenswrapper[5003]: I1206 15:36:15.064005 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 06 15:36:15 crc kubenswrapper[5003]: I1206 15:36:15.064021 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 06 15:36:15 crc kubenswrapper[5003]: I1206 15:36:15.064032 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 06 15:36:15 crc kubenswrapper[5003]: I1206 15:36:15.064044 5003 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0fc961e1-eee3-4fd5-ac99-56b85320740b-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 06 15:36:15 crc kubenswrapper[5003]: I1206 15:36:15.064056 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 06 15:36:15 crc kubenswrapper[5003]: I1206 15:36:15.064065 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: 
\"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 06 15:36:15 crc kubenswrapper[5003]: I1206 15:36:15.064074 5003 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0fc961e1-eee3-4fd5-ac99-56b85320740b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 06 15:36:15 crc kubenswrapper[5003]: I1206 15:36:15.064083 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 15:36:15 crc kubenswrapper[5003]: I1206 15:36:15.064094 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2g4n6\" (UniqueName: \"kubernetes.io/projected/0fc961e1-eee3-4fd5-ac99-56b85320740b-kube-api-access-2g4n6\") on node \"crc\" DevicePath \"\"" Dec 06 15:36:15 crc kubenswrapper[5003]: I1206 15:36:15.064102 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 06 15:36:15 crc kubenswrapper[5003]: I1206 15:36:15.064113 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 06 15:36:15 crc kubenswrapper[5003]: I1206 15:36:15.064122 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 06 15:36:15 crc kubenswrapper[5003]: I1206 15:36:15.064131 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0fc961e1-eee3-4fd5-ac99-56b85320740b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:36:15 crc kubenswrapper[5003]: I1206 15:36:15.263241 5003 generic.go:334] "Generic (PLEG): container finished" podID="0fc961e1-eee3-4fd5-ac99-56b85320740b" containerID="fc154be70cccef24292ecbfe62be9985bf7018b340d737f07c6a0c02e9821407" exitCode=0 Dec 06 15:36:15 crc kubenswrapper[5003]: I1206 15:36:15.263323 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" Dec 06 15:36:15 crc kubenswrapper[5003]: I1206 15:36:15.263342 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" event={"ID":"0fc961e1-eee3-4fd5-ac99-56b85320740b","Type":"ContainerDied","Data":"fc154be70cccef24292ecbfe62be9985bf7018b340d737f07c6a0c02e9821407"} Dec 06 15:36:15 crc kubenswrapper[5003]: I1206 15:36:15.263370 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" event={"ID":"0fc961e1-eee3-4fd5-ac99-56b85320740b","Type":"ContainerDied","Data":"559104660f3ef6ce4ea778f769f0da4ab45cc815885921946ac532b6d1abb9c4"} Dec 06 15:36:15 crc kubenswrapper[5003]: I1206 15:36:15.263386 5003 scope.go:117] "RemoveContainer" containerID="fc154be70cccef24292ecbfe62be9985bf7018b340d737f07c6a0c02e9821407" Dec 06 15:36:15 crc kubenswrapper[5003]: I1206 15:36:15.264061 5003 status_manager.go:851] "Failed to get status for pod" podUID="158c5179-aee8-4e5b-8a39-38d19808b3fd" pod="openshift-marketplace/redhat-operators-gwpxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gwpxn\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:15 crc kubenswrapper[5003]: I1206 15:36:15.264375 5003 status_manager.go:851] "Failed to get status for pod" podUID="7a65c8f8-e5cf-471d-bbf1-2c541164cdf8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:15 crc kubenswrapper[5003]: I1206 15:36:15.264727 5003 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:15 crc kubenswrapper[5003]: I1206 15:36:15.264932 5003 status_manager.go:851] "Failed to get status for pod" podUID="0fc961e1-eee3-4fd5-ac99-56b85320740b" pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-tsg4h\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:15 crc kubenswrapper[5003]: I1206 15:36:15.265132 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:15 crc kubenswrapper[5003]: I1206 15:36:15.290993 5003 status_manager.go:851] "Failed to get status for pod" podUID="158c5179-aee8-4e5b-8a39-38d19808b3fd" pod="openshift-marketplace/redhat-operators-gwpxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gwpxn\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:15 crc kubenswrapper[5003]: I1206 15:36:15.291351 5003 status_manager.go:851] "Failed to get status for pod" podUID="7a65c8f8-e5cf-471d-bbf1-2c541164cdf8" pod="openshift-kube-apiserver/installer-9-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:15 crc kubenswrapper[5003]: I1206 15:36:15.291878 5003 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:15 crc kubenswrapper[5003]: I1206 15:36:15.292051 5003 status_manager.go:851] "Failed to get status for pod" podUID="0fc961e1-eee3-4fd5-ac99-56b85320740b" pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-tsg4h\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:15 crc kubenswrapper[5003]: I1206 15:36:15.292195 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:15 crc kubenswrapper[5003]: I1206 15:36:15.304795 5003 scope.go:117] "RemoveContainer" containerID="fc154be70cccef24292ecbfe62be9985bf7018b340d737f07c6a0c02e9821407" Dec 06 15:36:15 crc kubenswrapper[5003]: E1206 15:36:15.305781 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc154be70cccef24292ecbfe62be9985bf7018b340d737f07c6a0c02e9821407\": container with ID starting with fc154be70cccef24292ecbfe62be9985bf7018b340d737f07c6a0c02e9821407 not found: ID does not exist" containerID="fc154be70cccef24292ecbfe62be9985bf7018b340d737f07c6a0c02e9821407" Dec 06 15:36:15 crc kubenswrapper[5003]: I1206 15:36:15.305836 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc154be70cccef24292ecbfe62be9985bf7018b340d737f07c6a0c02e9821407"} err="failed to get container status \"fc154be70cccef24292ecbfe62be9985bf7018b340d737f07c6a0c02e9821407\": rpc error: code = NotFound desc = could not find container \"fc154be70cccef24292ecbfe62be9985bf7018b340d737f07c6a0c02e9821407\": container with ID starting with fc154be70cccef24292ecbfe62be9985bf7018b340d737f07c6a0c02e9821407 not found: ID does not exist" Dec 06 15:36:17 crc kubenswrapper[5003]: E1206 15:36:17.364586 5003 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.73:6443: connect: connection refused" interval="6.4s" Dec 06 15:36:18 crc kubenswrapper[5003]: E1206 15:36:18.305086 5003 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.73:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187eaa5102d89bae openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Created,Message:Created container startup-monitor,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-06 15:36:08.892636078 +0000 UTC m=+247.425990459,LastTimestamp:2025-12-06 15:36:08.892636078 +0000 UTC m=+247.425990459,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 06 15:36:19 crc kubenswrapper[5003]: E1206 15:36:19.872796 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:36:19Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:36:19Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:36:19Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T15:36:19Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:15adb3b2133604b064893f8009a74145e4c8bb5b134d111346dcccbdd2aa9bc2\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:164fc35a19aa6cc886c8015c8ee3eba4895e76b1152cb9d795e4f3154a8533a3\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1610512706},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:544a01170a4aa6cf8322d5bffa5817113efd696e3c3e9bac6a29d2da9f9451e5\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:67f42a86b99b69b357285a6845977f967e6c825de2049c19620a78eaf99cebf3\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1222075732},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:3523faa7434bcaef341b7154f6919f25a1fcaa583614c94f5051332024556dd6\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:f8c8ed2e117f0e59ac547d4dea01ecc0c909006cccfbd646461171ffa7a8fb31\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1201849672},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1b1026c62413fa239fa4ff6541fe8bda656c1281867ad6ee2c848feccb13c97e\\\",\\\"registry.redhat.io/redhat/redhat-mark
etplace-index@sha256:2b633ebdc901d19290af4dc2d09e2b59c504c0fc15a3fba410b0ce098e2d5753\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1141987142},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9
b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2
cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792}]}}\" for node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:19 crc kubenswrapper[5003]: E1206 15:36:19.873519 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:19 crc kubenswrapper[5003]: E1206 15:36:19.874542 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:19 crc kubenswrapper[5003]: E1206 15:36:19.874906 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:19 crc kubenswrapper[5003]: E1206 15:36:19.875149 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:19 crc kubenswrapper[5003]: E1206 15:36:19.875167 5003 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 06 15:36:20 crc kubenswrapper[5003]: I1206 15:36:20.711650 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 15:36:20 crc kubenswrapper[5003]: I1206 15:36:20.713238 5003 status_manager.go:851] "Failed to get status for pod" podUID="158c5179-aee8-4e5b-8a39-38d19808b3fd" pod="openshift-marketplace/redhat-operators-gwpxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gwpxn\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:20 crc kubenswrapper[5003]: I1206 15:36:20.713717 5003 status_manager.go:851] "Failed to get status for pod" podUID="7a65c8f8-e5cf-471d-bbf1-2c541164cdf8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:20 crc kubenswrapper[5003]: I1206 15:36:20.714050 5003 status_manager.go:851] "Failed to get status for pod" podUID="0fc961e1-eee3-4fd5-ac99-56b85320740b" pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-tsg4h\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:20 crc kubenswrapper[5003]: I1206 15:36:20.714331 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:20 crc kubenswrapper[5003]: I1206 15:36:20.731430 5003 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5ae2d36f-5a31-4da3-aae8-0378c481f230" Dec 06 15:36:20 crc kubenswrapper[5003]: I1206 15:36:20.731511 5003 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5ae2d36f-5a31-4da3-aae8-0378c481f230" Dec 06 15:36:20 crc kubenswrapper[5003]: E1206 15:36:20.732123 5003 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 15:36:20 crc kubenswrapper[5003]: I1206 15:36:20.732857 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 15:36:21 crc kubenswrapper[5003]: I1206 15:36:21.297100 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"772fb35c6ece56818b789ac744a1d278cc522a8218158ccc3e8e646767282850"} Dec 06 15:36:21 crc kubenswrapper[5003]: I1206 15:36:21.297681 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"7affa10d436e4b3d17779e78543584121d5064335da252526c85385a9d042936"} Dec 06 15:36:21 crc kubenswrapper[5003]: I1206 15:36:21.297954 5003 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5ae2d36f-5a31-4da3-aae8-0378c481f230" Dec 06 15:36:21 crc kubenswrapper[5003]: I1206 15:36:21.297971 5003 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5ae2d36f-5a31-4da3-aae8-0378c481f230" Dec 06 15:36:21 crc kubenswrapper[5003]: I1206 15:36:21.298352 5003 status_manager.go:851] "Failed to get status for pod" podUID="7a65c8f8-e5cf-471d-bbf1-2c541164cdf8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:21 crc kubenswrapper[5003]: E1206 15:36:21.298369 5003 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 15:36:21 crc kubenswrapper[5003]: I1206 15:36:21.298677 5003 status_manager.go:851] "Failed to get status for pod" podUID="0fc961e1-eee3-4fd5-ac99-56b85320740b" pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-tsg4h\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:21 crc kubenswrapper[5003]: I1206 15:36:21.298973 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:21 crc kubenswrapper[5003]: I1206 15:36:21.299284 5003 status_manager.go:851] "Failed to get status for pod" podUID="158c5179-aee8-4e5b-8a39-38d19808b3fd" pod="openshift-marketplace/redhat-operators-gwpxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gwpxn\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:21 crc kubenswrapper[5003]: I1206 15:36:21.718213 5003 status_manager.go:851] "Failed to get status for pod" podUID="7a65c8f8-e5cf-471d-bbf1-2c541164cdf8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:21 crc kubenswrapper[5003]: I1206 15:36:21.718747 5003 status_manager.go:851] "Failed 
to get status for pod" podUID="0fc961e1-eee3-4fd5-ac99-56b85320740b" pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-tsg4h\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:21 crc kubenswrapper[5003]: I1206 15:36:21.719112 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:21 crc kubenswrapper[5003]: I1206 15:36:21.719720 5003 status_manager.go:851] "Failed to get status for pod" podUID="158c5179-aee8-4e5b-8a39-38d19808b3fd" pod="openshift-marketplace/redhat-operators-gwpxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gwpxn\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:21 crc kubenswrapper[5003]: I1206 15:36:21.720315 5003 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:22 crc kubenswrapper[5003]: I1206 15:36:22.303620 5003 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="772fb35c6ece56818b789ac744a1d278cc522a8218158ccc3e8e646767282850" exitCode=0 Dec 06 15:36:22 crc kubenswrapper[5003]: I1206 15:36:22.303671 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"772fb35c6ece56818b789ac744a1d278cc522a8218158ccc3e8e646767282850"} Dec 06 15:36:22 crc kubenswrapper[5003]: I1206 15:36:22.303954 5003 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5ae2d36f-5a31-4da3-aae8-0378c481f230" Dec 06 15:36:22 crc kubenswrapper[5003]: I1206 15:36:22.303969 5003 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5ae2d36f-5a31-4da3-aae8-0378c481f230" Dec 06 15:36:22 crc kubenswrapper[5003]: E1206 15:36:22.304460 5003 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 15:36:22 crc kubenswrapper[5003]: I1206 15:36:22.304651 5003 status_manager.go:851] "Failed to get status for pod" podUID="158c5179-aee8-4e5b-8a39-38d19808b3fd" pod="openshift-marketplace/redhat-operators-gwpxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gwpxn\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:22 crc kubenswrapper[5003]: I1206 15:36:22.305017 5003 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": 
dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:22 crc kubenswrapper[5003]: I1206 15:36:22.305617 5003 status_manager.go:851] "Failed to get status for pod" podUID="7a65c8f8-e5cf-471d-bbf1-2c541164cdf8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:22 crc kubenswrapper[5003]: I1206 15:36:22.306043 5003 status_manager.go:851] "Failed to get status for pod" podUID="0fc961e1-eee3-4fd5-ac99-56b85320740b" pod="openshift-authentication/oauth-openshift-558db77b4-tsg4h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-tsg4h\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:22 crc kubenswrapper[5003]: I1206 15:36:22.306431 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.73:6443: connect: connection refused" Dec 06 15:36:23 crc kubenswrapper[5003]: I1206 15:36:23.316126 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"035ce35a7e39e750d6ecea3f9aef3d63ef4964f382d9af9d5dca91aa61e4961e"} Dec 06 15:36:23 crc kubenswrapper[5003]: I1206 15:36:23.316450 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"d7037ca9b140a16d445c4ed57037390c7dbf6c7bc46eedd8a7663dcc11ebcd52"} Dec 06 15:36:23 crc kubenswrapper[5003]: I1206 15:36:23.316462 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"7315c45863bce392e76ad4fc1a4383ea8c19e52d8d310bb469b41afb94e80d43"} Dec 06 15:36:23 crc kubenswrapper[5003]: I1206 15:36:23.319557 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 06 15:36:23 crc kubenswrapper[5003]: I1206 15:36:23.319608 5003 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1" exitCode=1 Dec 06 15:36:23 crc kubenswrapper[5003]: I1206 15:36:23.319640 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1"} Dec 06 15:36:23 crc kubenswrapper[5003]: I1206 15:36:23.320123 5003 scope.go:117] "RemoveContainer" containerID="6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1" Dec 06 15:36:24 crc kubenswrapper[5003]: I1206 15:36:24.328529 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 06 15:36:24 crc kubenswrapper[5003]: I1206 15:36:24.328824 
5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"4f874ed8c60f2fb82c96596812bcd8d2706b8e179532086b790372da4104b8bd"} Dec 06 15:36:24 crc kubenswrapper[5003]: I1206 15:36:24.332593 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"47dd45097846a47d56a6945cce08770beac45e4fde3589e182f1f888fc86bdc5"} Dec 06 15:36:24 crc kubenswrapper[5003]: I1206 15:36:24.332636 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"54c24d1ed8f2110b6330494fec4903bc308903e938ad63184e84d1e77c31a69c"} Dec 06 15:36:24 crc kubenswrapper[5003]: I1206 15:36:24.332763 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 15:36:24 crc kubenswrapper[5003]: I1206 15:36:24.332842 5003 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5ae2d36f-5a31-4da3-aae8-0378c481f230" Dec 06 15:36:24 crc kubenswrapper[5003]: I1206 15:36:24.332868 5003 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5ae2d36f-5a31-4da3-aae8-0378c481f230" Dec 06 15:36:25 crc kubenswrapper[5003]: I1206 15:36:25.364250 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 06 15:36:25 crc kubenswrapper[5003]: I1206 15:36:25.733646 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 15:36:25 crc kubenswrapper[5003]: I1206 15:36:25.733698 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 15:36:25 crc kubenswrapper[5003]: I1206 15:36:25.738772 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 15:36:29 crc kubenswrapper[5003]: I1206 15:36:29.341236 5003 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 15:36:30 crc kubenswrapper[5003]: I1206 15:36:30.363299 5003 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5ae2d36f-5a31-4da3-aae8-0378c481f230" Dec 06 15:36:30 crc kubenswrapper[5003]: I1206 15:36:30.363342 5003 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5ae2d36f-5a31-4da3-aae8-0378c481f230" Dec 06 15:36:30 crc kubenswrapper[5003]: I1206 15:36:30.369792 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 15:36:30 crc kubenswrapper[5003]: I1206 15:36:30.372279 5003 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="cb9a64ca-e6bb-42a5-b455-e5b0bf9cb1af" Dec 06 15:36:31 crc kubenswrapper[5003]: I1206 15:36:31.368603 5003 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5ae2d36f-5a31-4da3-aae8-0378c481f230" Dec 06 15:36:31 crc 
kubenswrapper[5003]: I1206 15:36:31.368640 5003 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5ae2d36f-5a31-4da3-aae8-0378c481f230" Dec 06 15:36:31 crc kubenswrapper[5003]: I1206 15:36:31.725558 5003 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="cb9a64ca-e6bb-42a5-b455-e5b0bf9cb1af" Dec 06 15:36:32 crc kubenswrapper[5003]: I1206 15:36:32.522714 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 06 15:36:32 crc kubenswrapper[5003]: I1206 15:36:32.522951 5003 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 06 15:36:32 crc kubenswrapper[5003]: I1206 15:36:32.523003 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 06 15:36:38 crc kubenswrapper[5003]: I1206 15:36:38.665855 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 06 15:36:39 crc kubenswrapper[5003]: I1206 15:36:39.260808 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 06 15:36:39 crc kubenswrapper[5003]: I1206 15:36:39.597573 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 06 15:36:40 crc kubenswrapper[5003]: I1206 15:36:40.132659 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 06 15:36:40 crc kubenswrapper[5003]: I1206 15:36:40.257863 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 06 15:36:40 crc kubenswrapper[5003]: I1206 15:36:40.833566 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 06 15:36:41 crc kubenswrapper[5003]: I1206 15:36:41.042940 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 06 15:36:41 crc kubenswrapper[5003]: I1206 15:36:41.101888 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 06 15:36:41 crc kubenswrapper[5003]: I1206 15:36:41.247238 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 06 15:36:41 crc kubenswrapper[5003]: I1206 15:36:41.471064 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 06 15:36:41 crc kubenswrapper[5003]: I1206 15:36:41.484354 5003 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 06 15:36:41 crc kubenswrapper[5003]: I1206 15:36:41.517802 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 06 15:36:41 crc kubenswrapper[5003]: I1206 15:36:41.775431 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 06 15:36:41 crc kubenswrapper[5003]: I1206 15:36:41.974089 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 06 15:36:41 crc kubenswrapper[5003]: I1206 15:36:41.978341 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 06 15:36:42 crc kubenswrapper[5003]: I1206 15:36:42.047422 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 06 15:36:42 crc kubenswrapper[5003]: I1206 15:36:42.341050 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 06 15:36:42 crc kubenswrapper[5003]: I1206 15:36:42.437375 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 06 15:36:42 crc kubenswrapper[5003]: I1206 15:36:42.451883 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 06 15:36:42 crc kubenswrapper[5003]: I1206 15:36:42.523144 5003 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 06 15:36:42 crc kubenswrapper[5003]: I1206 15:36:42.523238 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 06 15:36:42 crc kubenswrapper[5003]: I1206 15:36:42.553092 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 06 15:36:42 crc kubenswrapper[5003]: I1206 15:36:42.593026 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 06 15:36:42 crc kubenswrapper[5003]: I1206 15:36:42.684723 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 06 15:36:42 crc kubenswrapper[5003]: I1206 15:36:42.699284 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 06 15:36:43 crc kubenswrapper[5003]: I1206 15:36:43.039478 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 06 15:36:43 crc kubenswrapper[5003]: I1206 15:36:43.108641 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 06 15:36:43 crc kubenswrapper[5003]: I1206 15:36:43.142188 5003 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 06 15:36:43 crc kubenswrapper[5003]: I1206 15:36:43.225352 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 06 15:36:43 crc kubenswrapper[5003]: I1206 15:36:43.237392 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 06 15:36:43 crc kubenswrapper[5003]: I1206 15:36:43.315696 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 06 15:36:43 crc kubenswrapper[5003]: I1206 15:36:43.358104 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 06 15:36:43 crc kubenswrapper[5003]: I1206 15:36:43.374798 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 06 15:36:43 crc kubenswrapper[5003]: I1206 15:36:43.452931 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 06 15:36:43 crc kubenswrapper[5003]: I1206 15:36:43.458910 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 06 15:36:43 crc kubenswrapper[5003]: I1206 15:36:43.545034 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 06 15:36:43 crc kubenswrapper[5003]: I1206 15:36:43.606566 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 06 15:36:43 crc kubenswrapper[5003]: I1206 15:36:43.707523 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 06 15:36:43 crc kubenswrapper[5003]: I1206 15:36:43.805323 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 06 15:36:43 crc kubenswrapper[5003]: I1206 15:36:43.817029 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 06 15:36:43 crc kubenswrapper[5003]: I1206 15:36:43.829384 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 06 15:36:43 crc kubenswrapper[5003]: I1206 15:36:43.858154 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 06 15:36:44 crc kubenswrapper[5003]: I1206 15:36:44.172887 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 06 15:36:44 crc kubenswrapper[5003]: I1206 15:36:44.186167 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 06 15:36:44 crc kubenswrapper[5003]: I1206 15:36:44.223794 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 06 15:36:44 crc kubenswrapper[5003]: I1206 15:36:44.251597 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 06 15:36:44 crc kubenswrapper[5003]: I1206 15:36:44.261086 5003 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 06 15:36:44 crc kubenswrapper[5003]: I1206 15:36:44.430916 5003 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 06 15:36:44 crc kubenswrapper[5003]: I1206 15:36:44.471178 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 06 15:36:44 crc kubenswrapper[5003]: I1206 15:36:44.585678 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 06 15:36:44 crc kubenswrapper[5003]: I1206 15:36:44.611273 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 06 15:36:44 crc kubenswrapper[5003]: I1206 15:36:44.656875 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 06 15:36:44 crc kubenswrapper[5003]: I1206 15:36:44.694914 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 06 15:36:44 crc kubenswrapper[5003]: I1206 15:36:44.715320 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 06 15:36:44 crc kubenswrapper[5003]: I1206 15:36:44.718652 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 06 15:36:44 crc kubenswrapper[5003]: I1206 15:36:44.757223 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 06 15:36:44 crc kubenswrapper[5003]: I1206 15:36:44.790267 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 06 15:36:44 crc kubenswrapper[5003]: I1206 15:36:44.995537 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 06 15:36:44 crc kubenswrapper[5003]: I1206 15:36:44.995584 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 06 15:36:45 crc kubenswrapper[5003]: I1206 15:36:45.040388 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 06 15:36:45 crc kubenswrapper[5003]: I1206 15:36:45.058771 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 06 15:36:45 crc kubenswrapper[5003]: I1206 15:36:45.060429 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 06 15:36:45 crc kubenswrapper[5003]: I1206 15:36:45.079013 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 06 15:36:45 crc kubenswrapper[5003]: I1206 15:36:45.119873 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 06 15:36:45 crc kubenswrapper[5003]: I1206 15:36:45.175321 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 06 15:36:45 crc kubenswrapper[5003]: I1206 15:36:45.178765 5003 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 06 15:36:45 crc kubenswrapper[5003]: I1206 15:36:45.224253 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 06 15:36:45 crc kubenswrapper[5003]: I1206 15:36:45.296469 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 06 15:36:45 crc kubenswrapper[5003]: I1206 15:36:45.390727 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 06 15:36:45 crc kubenswrapper[5003]: I1206 15:36:45.443227 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 06 15:36:45 crc kubenswrapper[5003]: I1206 15:36:45.443230 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 06 15:36:45 crc kubenswrapper[5003]: I1206 15:36:45.500677 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 06 15:36:45 crc kubenswrapper[5003]: I1206 15:36:45.540761 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 06 15:36:45 crc kubenswrapper[5003]: I1206 15:36:45.562444 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 06 15:36:45 crc kubenswrapper[5003]: I1206 15:36:45.633932 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 06 15:36:45 crc kubenswrapper[5003]: I1206 15:36:45.710753 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 06 15:36:45 crc kubenswrapper[5003]: I1206 15:36:45.736996 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 06 15:36:45 crc kubenswrapper[5003]: I1206 15:36:45.768003 5003 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 06 15:36:45 crc kubenswrapper[5003]: I1206 15:36:45.789664 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 06 15:36:45 crc kubenswrapper[5003]: I1206 15:36:45.796809 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 06 15:36:45 crc kubenswrapper[5003]: I1206 15:36:45.822165 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 06 15:36:45 crc kubenswrapper[5003]: I1206 15:36:45.828382 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 06 15:36:45 crc kubenswrapper[5003]: I1206 15:36:45.861542 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 06 15:36:45 crc kubenswrapper[5003]: I1206 15:36:45.919984 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 06 15:36:45 crc kubenswrapper[5003]: I1206 15:36:45.988911 5003 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 06 15:36:45 crc kubenswrapper[5003]: I1206 15:36:45.996792 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 06 15:36:46 crc kubenswrapper[5003]: I1206 15:36:46.193438 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 06 15:36:46 crc kubenswrapper[5003]: I1206 15:36:46.200621 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 06 15:36:46 crc kubenswrapper[5003]: I1206 15:36:46.236231 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 06 15:36:46 crc kubenswrapper[5003]: I1206 15:36:46.271122 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 06 15:36:46 crc kubenswrapper[5003]: I1206 15:36:46.292822 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 06 15:36:46 crc kubenswrapper[5003]: I1206 15:36:46.354675 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 06 15:36:46 crc kubenswrapper[5003]: I1206 15:36:46.421194 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 06 15:36:46 crc kubenswrapper[5003]: I1206 15:36:46.529003 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 06 15:36:46 crc kubenswrapper[5003]: I1206 15:36:46.567136 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 06 15:36:46 crc kubenswrapper[5003]: I1206 15:36:46.724527 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 06 15:36:46 crc kubenswrapper[5003]: I1206 15:36:46.812055 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 06 15:36:46 crc kubenswrapper[5003]: I1206 15:36:46.831220 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 06 15:36:46 crc kubenswrapper[5003]: I1206 15:36:46.857074 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 06 15:36:46 crc kubenswrapper[5003]: I1206 15:36:46.862741 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 06 15:36:46 crc kubenswrapper[5003]: I1206 15:36:46.885887 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 06 15:36:46 crc kubenswrapper[5003]: I1206 15:36:46.986704 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 06 15:36:47 crc kubenswrapper[5003]: I1206 15:36:47.036445 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 06 15:36:47 crc kubenswrapper[5003]: I1206 15:36:47.126202 5003 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 06 15:36:47 crc kubenswrapper[5003]: I1206 15:36:47.242337 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 06 15:36:47 crc kubenswrapper[5003]: I1206 15:36:47.293633 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 06 15:36:47 crc kubenswrapper[5003]: I1206 15:36:47.476500 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 06 15:36:47 crc kubenswrapper[5003]: I1206 15:36:47.504876 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 06 15:36:47 crc kubenswrapper[5003]: I1206 15:36:47.546541 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 06 15:36:47 crc kubenswrapper[5003]: I1206 15:36:47.553804 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 06 15:36:47 crc kubenswrapper[5003]: I1206 15:36:47.603990 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 06 15:36:47 crc kubenswrapper[5003]: I1206 15:36:47.617859 5003 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 06 15:36:47 crc kubenswrapper[5003]: I1206 15:36:47.632275 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 06 15:36:47 crc kubenswrapper[5003]: I1206 15:36:47.670311 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 06 15:36:47 crc kubenswrapper[5003]: I1206 15:36:47.721796 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 06 15:36:47 crc kubenswrapper[5003]: I1206 15:36:47.748161 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 06 15:36:47 crc kubenswrapper[5003]: I1206 15:36:47.748413 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 06 15:36:47 crc kubenswrapper[5003]: I1206 15:36:47.814934 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 06 15:36:48 crc kubenswrapper[5003]: I1206 15:36:48.030895 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 06 15:36:48 crc kubenswrapper[5003]: I1206 15:36:48.036205 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 06 15:36:48 crc kubenswrapper[5003]: I1206 15:36:48.082929 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 06 15:36:48 crc kubenswrapper[5003]: I1206 15:36:48.119482 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 06 15:36:48 crc kubenswrapper[5003]: I1206 15:36:48.143804 5003 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 06 15:36:48 crc kubenswrapper[5003]: I1206 15:36:48.168069 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 06 15:36:48 crc kubenswrapper[5003]: I1206 15:36:48.980795 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 06 15:36:48 crc kubenswrapper[5003]: I1206 15:36:48.981744 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 06 15:36:48 crc kubenswrapper[5003]: I1206 15:36:48.983968 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 06 15:36:48 crc kubenswrapper[5003]: I1206 15:36:48.985900 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 06 15:36:48 crc kubenswrapper[5003]: I1206 15:36:48.986303 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 06 15:36:48 crc kubenswrapper[5003]: I1206 15:36:48.987768 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 06 15:36:48 crc kubenswrapper[5003]: I1206 15:36:48.987954 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 06 15:36:48 crc kubenswrapper[5003]: I1206 15:36:48.989189 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 06 15:36:48 crc kubenswrapper[5003]: I1206 15:36:48.989955 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 06 15:36:48 crc kubenswrapper[5003]: I1206 15:36:48.989999 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 06 15:36:48 crc kubenswrapper[5003]: I1206 15:36:48.990117 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 06 15:36:48 crc kubenswrapper[5003]: I1206 15:36:48.990215 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 06 15:36:48 crc kubenswrapper[5003]: I1206 15:36:48.990442 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 06 15:36:48 crc kubenswrapper[5003]: I1206 15:36:48.990610 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 06 15:36:48 crc kubenswrapper[5003]: I1206 15:36:48.990719 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 06 15:36:48 crc kubenswrapper[5003]: I1206 15:36:48.990840 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 06 15:36:48 crc kubenswrapper[5003]: I1206 15:36:48.992710 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 06 15:36:49 crc kubenswrapper[5003]: I1206 
15:36:49.055668 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 06 15:36:49 crc kubenswrapper[5003]: I1206 15:36:49.116358 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 06 15:36:49 crc kubenswrapper[5003]: I1206 15:36:49.324295 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 06 15:36:49 crc kubenswrapper[5003]: I1206 15:36:49.326657 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 06 15:36:49 crc kubenswrapper[5003]: I1206 15:36:49.461984 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 06 15:36:49 crc kubenswrapper[5003]: I1206 15:36:49.532563 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 06 15:36:49 crc kubenswrapper[5003]: I1206 15:36:49.649741 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 06 15:36:49 crc kubenswrapper[5003]: I1206 15:36:49.652051 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 06 15:36:49 crc kubenswrapper[5003]: I1206 15:36:49.662704 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 06 15:36:49 crc kubenswrapper[5003]: I1206 15:36:49.663858 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 06 15:36:49 crc kubenswrapper[5003]: I1206 15:36:49.695344 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 06 15:36:49 crc kubenswrapper[5003]: I1206 15:36:49.810829 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 06 15:36:49 crc kubenswrapper[5003]: I1206 15:36:49.838073 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 06 15:36:49 crc kubenswrapper[5003]: I1206 15:36:49.862117 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 06 15:36:49 crc kubenswrapper[5003]: I1206 15:36:49.876250 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 06 15:36:49 crc kubenswrapper[5003]: I1206 15:36:49.927720 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 06 15:36:49 crc kubenswrapper[5003]: I1206 15:36:49.966808 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.064367 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.073772 5003 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-config-operator"/"kube-root-ca.crt" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.133716 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.173450 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.259060 5003 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.262502 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=42.262470046 podStartE2EDuration="42.262470046s" podCreationTimestamp="2025-12-06 15:36:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:36:29.203583951 +0000 UTC m=+267.736938342" watchObservedRunningTime="2025-12-06 15:36:50.262470046 +0000 UTC m=+288.795824437" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.264245 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-tsg4h","openshift-marketplace/redhat-operators-gwpxn","openshift-kube-apiserver/kube-apiserver-crc"] Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.264300 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd","openshift-kube-apiserver/kube-apiserver-crc"] Dec 06 15:36:50 crc kubenswrapper[5003]: E1206 15:36:50.264477 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fc961e1-eee3-4fd5-ac99-56b85320740b" containerName="oauth-openshift" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.264507 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fc961e1-eee3-4fd5-ac99-56b85320740b" containerName="oauth-openshift" Dec 06 15:36:50 crc kubenswrapper[5003]: E1206 15:36:50.264518 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="158c5179-aee8-4e5b-8a39-38d19808b3fd" containerName="registry-server" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.264524 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="158c5179-aee8-4e5b-8a39-38d19808b3fd" containerName="registry-server" Dec 06 15:36:50 crc kubenswrapper[5003]: E1206 15:36:50.264532 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a65c8f8-e5cf-471d-bbf1-2c541164cdf8" containerName="installer" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.264537 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a65c8f8-e5cf-471d-bbf1-2c541164cdf8" containerName="installer" Dec 06 15:36:50 crc kubenswrapper[5003]: E1206 15:36:50.264544 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="158c5179-aee8-4e5b-8a39-38d19808b3fd" containerName="extract-content" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.264550 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="158c5179-aee8-4e5b-8a39-38d19808b3fd" containerName="extract-content" Dec 06 15:36:50 crc kubenswrapper[5003]: E1206 15:36:50.264562 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="158c5179-aee8-4e5b-8a39-38d19808b3fd" containerName="extract-utilities" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.264568 5003 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="158c5179-aee8-4e5b-8a39-38d19808b3fd" containerName="extract-utilities" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.264652 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fc961e1-eee3-4fd5-ac99-56b85320740b" containerName="oauth-openshift" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.264664 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a65c8f8-e5cf-471d-bbf1-2c541164cdf8" containerName="installer" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.264673 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="158c5179-aee8-4e5b-8a39-38d19808b3fd" containerName="registry-server" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.264734 5003 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5ae2d36f-5a31-4da3-aae8-0378c481f230" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.264753 5003 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5ae2d36f-5a31-4da3-aae8-0378c481f230" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.265005 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.268975 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.269387 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.269560 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.269865 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.269678 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.269903 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.270004 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.270194 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.270255 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.271226 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.271463 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.271653 5003 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.271939 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.281773 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.283094 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.288267 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.288716 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.291465 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=21.291449257 podStartE2EDuration="21.291449257s" podCreationTimestamp="2025-12-06 15:36:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:36:50.287909527 +0000 UTC m=+288.821263928" watchObservedRunningTime="2025-12-06 15:36:50.291449257 +0000 UTC m=+288.824803638" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.291781 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.292375 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.327372 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.377297 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/010a07de-7eee-4d38-b3b9-6880f1d9c622-v4-0-config-user-template-error\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.377375 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltfwh\" (UniqueName: \"kubernetes.io/projected/010a07de-7eee-4d38-b3b9-6880f1d9c622-kube-api-access-ltfwh\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd" Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.377476 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/010a07de-7eee-4d38-b3b9-6880f1d9c622-v4-0-config-system-session\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd" 
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.377559 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/010a07de-7eee-4d38-b3b9-6880f1d9c622-v4-0-config-system-service-ca\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.377610 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/010a07de-7eee-4d38-b3b9-6880f1d9c622-v4-0-config-user-template-login\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.377641 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/010a07de-7eee-4d38-b3b9-6880f1d9c622-audit-policies\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.377718 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/010a07de-7eee-4d38-b3b9-6880f1d9c622-audit-dir\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.377761 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/010a07de-7eee-4d38-b3b9-6880f1d9c622-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.377797 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/010a07de-7eee-4d38-b3b9-6880f1d9c622-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.377849 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/010a07de-7eee-4d38-b3b9-6880f1d9c622-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.377891 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/010a07de-7eee-4d38-b3b9-6880f1d9c622-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.377926 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/010a07de-7eee-4d38-b3b9-6880f1d9c622-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.377980 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/010a07de-7eee-4d38-b3b9-6880f1d9c622-v4-0-config-system-router-certs\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.378051 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/010a07de-7eee-4d38-b3b9-6880f1d9c622-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.383354 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.418948 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.454153 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.465793 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.478910 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/010a07de-7eee-4d38-b3b9-6880f1d9c622-audit-dir\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.478956 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/010a07de-7eee-4d38-b3b9-6880f1d9c622-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.478984 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/010a07de-7eee-4d38-b3b9-6880f1d9c622-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.479008 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/010a07de-7eee-4d38-b3b9-6880f1d9c622-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.479033 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/010a07de-7eee-4d38-b3b9-6880f1d9c622-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.479052 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/010a07de-7eee-4d38-b3b9-6880f1d9c622-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.479061 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/010a07de-7eee-4d38-b3b9-6880f1d9c622-audit-dir\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.479069 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/010a07de-7eee-4d38-b3b9-6880f1d9c622-v4-0-config-system-router-certs\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.479149 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/010a07de-7eee-4d38-b3b9-6880f1d9c622-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.479200 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/010a07de-7eee-4d38-b3b9-6880f1d9c622-v4-0-config-user-template-error\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.479272 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltfwh\" (UniqueName: \"kubernetes.io/projected/010a07de-7eee-4d38-b3b9-6880f1d9c622-kube-api-access-ltfwh\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.480305 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/010a07de-7eee-4d38-b3b9-6880f1d9c622-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.480713 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/010a07de-7eee-4d38-b3b9-6880f1d9c622-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.480829 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/010a07de-7eee-4d38-b3b9-6880f1d9c622-v4-0-config-system-session\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.480855 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/010a07de-7eee-4d38-b3b9-6880f1d9c622-v4-0-config-system-service-ca\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.481220 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/010a07de-7eee-4d38-b3b9-6880f1d9c622-v4-0-config-user-template-login\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.481306 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/010a07de-7eee-4d38-b3b9-6880f1d9c622-audit-policies\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.481534 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/010a07de-7eee-4d38-b3b9-6880f1d9c622-v4-0-config-system-service-ca\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.482092 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/010a07de-7eee-4d38-b3b9-6880f1d9c622-audit-policies\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.484778 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/010a07de-7eee-4d38-b3b9-6880f1d9c622-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.484796 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/010a07de-7eee-4d38-b3b9-6880f1d9c622-v4-0-config-system-session\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.484822 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/010a07de-7eee-4d38-b3b9-6880f1d9c622-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.484884 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/010a07de-7eee-4d38-b3b9-6880f1d9c622-v4-0-config-user-template-login\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.485318 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/010a07de-7eee-4d38-b3b9-6880f1d9c622-v4-0-config-user-template-error\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.490944 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/010a07de-7eee-4d38-b3b9-6880f1d9c622-v4-0-config-system-router-certs\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.495575 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/010a07de-7eee-4d38-b3b9-6880f1d9c622-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.496326 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/010a07de-7eee-4d38-b3b9-6880f1d9c622-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.502350 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltfwh\" (UniqueName: \"kubernetes.io/projected/010a07de-7eee-4d38-b3b9-6880f1d9c622-kube-api-access-ltfwh\") pod \"oauth-openshift-5cf8f9f8d-hv2pd\" (UID: \"010a07de-7eee-4d38-b3b9-6880f1d9c622\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.527242 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.572076 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.586533 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.742500 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.789120 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.818204 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx"
Dec 06 15:36:50 crc kubenswrapper[5003]: I1206 15:36:50.974153 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt"
Dec 06 15:36:51 crc kubenswrapper[5003]: I1206 15:36:51.166554 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw"
Dec 06 15:36:51 crc kubenswrapper[5003]: I1206 15:36:51.252786 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Dec 06 15:36:51 crc kubenswrapper[5003]: I1206 15:36:51.332144 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt"
Dec 06 15:36:51 crc kubenswrapper[5003]: I1206 15:36:51.376174 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Dec 06 15:36:51 crc kubenswrapper[5003]: I1206 15:36:51.407602 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert"
Dec 06 15:36:51 crc kubenswrapper[5003]: I1206 15:36:51.433602 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Dec 06 15:36:51 crc kubenswrapper[5003]: I1206 15:36:51.506270 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls"
Dec 06 15:36:51 crc kubenswrapper[5003]: I1206 15:36:51.606687 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Dec 06 15:36:51 crc kubenswrapper[5003]: I1206 15:36:51.666427 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7"
Dec 06 15:36:51 crc kubenswrapper[5003]: I1206 15:36:51.719046 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Dec 06 15:36:51 crc kubenswrapper[5003]: I1206 15:36:51.721151 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0fc961e1-eee3-4fd5-ac99-56b85320740b" path="/var/lib/kubelet/pods/0fc961e1-eee3-4fd5-ac99-56b85320740b/volumes"
Dec 06 15:36:51 crc kubenswrapper[5003]: I1206 15:36:51.722072 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="158c5179-aee8-4e5b-8a39-38d19808b3fd" path="/var/lib/kubelet/pods/158c5179-aee8-4e5b-8a39-38d19808b3fd/volumes"
Dec 06 15:36:51 crc kubenswrapper[5003]: I1206 15:36:51.727962 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Dec 06 15:36:51 crc kubenswrapper[5003]: I1206 15:36:51.767036 5003 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 06 15:36:51 crc kubenswrapper[5003]: I1206 15:36:51.767293 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://a2b6b0947f32c0a44ee1fb4927ff98d8f411909f06c4abcf03fcbd5b70180311" gracePeriod=5
Dec 06 15:36:51 crc kubenswrapper[5003]: I1206 15:36:51.782322 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Dec 06 15:36:51 crc kubenswrapper[5003]: I1206 15:36:51.861139 5003 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Dec 06 15:36:51 crc kubenswrapper[5003]: I1206 15:36:51.960334 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd"
Dec 06 15:36:51 crc kubenswrapper[5003]: I1206 15:36:51.993390 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt"
Dec 06 15:36:51 crc kubenswrapper[5003]: I1206 15:36:51.994314 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Dec 06 15:36:52 crc kubenswrapper[5003]: I1206 15:36:52.312068 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Dec 06 15:36:52 crc kubenswrapper[5003]: I1206 15:36:52.316453 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert"
Dec 06 15:36:52 crc kubenswrapper[5003]: I1206 15:36:52.392056 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Dec 06 15:36:52 crc kubenswrapper[5003]: I1206 15:36:52.398261 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Dec 06 15:36:52 crc kubenswrapper[5003]: I1206 15:36:52.410596 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config"
Dec 06 15:36:52 crc kubenswrapper[5003]: I1206 15:36:52.492395 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert"
Dec 06 15:36:52 crc kubenswrapper[5003]: I1206 15:36:52.523183 5003 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body=
Dec 06 15:36:52 crc kubenswrapper[5003]: I1206 15:36:52.523248 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused"
Dec 06 15:36:52 crc kubenswrapper[5003]: I1206 15:36:52.523306 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 06 15:36:52 crc kubenswrapper[5003]: I1206 15:36:52.524195 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="kube-controller-manager" containerStatusID={"Type":"cri-o","ID":"4f874ed8c60f2fb82c96596812bcd8d2706b8e179532086b790372da4104b8bd"} pod="openshift-kube-controller-manager/kube-controller-manager-crc" containerMessage="Container kube-controller-manager failed startup probe, will be restarted"
Dec 06 15:36:52 crc kubenswrapper[5003]: I1206 15:36:52.524381 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" containerID="cri-o://4f874ed8c60f2fb82c96596812bcd8d2706b8e179532086b790372da4104b8bd" gracePeriod=30
Dec 06 15:36:52 crc kubenswrapper[5003]: I1206 15:36:52.543902 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz"
Dec 06 15:36:52 crc kubenswrapper[5003]: I1206 15:36:52.595670 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Dec 06 15:36:52 crc kubenswrapper[5003]: I1206 15:36:52.688061 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Dec 06 15:36:52 crc kubenswrapper[5003]: I1206 15:36:52.775925 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt"
Dec 06 15:36:52 crc kubenswrapper[5003]: I1206 15:36:52.817473 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"]
Dec 06 15:36:52 crc kubenswrapper[5003]: I1206 15:36:52.826981 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt"
Dec 06 15:36:53 crc kubenswrapper[5003]: I1206 15:36:53.231844 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"]
Dec 06 15:36:53 crc kubenswrapper[5003]: I1206 15:36:53.243703 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client"
Dec 06 15:36:53 crc kubenswrapper[5003]: I1206 15:36:53.261159 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Dec 06 15:36:53 crc kubenswrapper[5003]: I1206 15:36:53.291523 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls"
Dec 06 15:36:53 crc kubenswrapper[5003]: I1206 15:36:53.306438 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Dec 06 15:36:53 crc kubenswrapper[5003]: I1206 15:36:53.309513 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls"
Dec 06 15:36:53 crc kubenswrapper[5003]: I1206 15:36:53.382406 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Dec 06 15:36:53 crc kubenswrapper[5003]: I1206 15:36:53.447720 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Dec 06 15:36:53 crc kubenswrapper[5003]: I1206 15:36:53.673991 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt"
Dec 06 15:36:53 crc kubenswrapper[5003]: I1206 15:36:53.700247 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Dec 06 15:36:53 crc kubenswrapper[5003]: I1206 15:36:53.832288 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Dec 06 15:36:53 crc kubenswrapper[5003]: I1206 15:36:53.868463 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert"
Dec 06 15:36:53 crc kubenswrapper[5003]: I1206 15:36:53.889034 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Dec 06 15:36:53 crc kubenswrapper[5003]: I1206 15:36:53.988943 5003 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Dec 06 15:36:54 crc kubenswrapper[5003]: I1206 15:36:54.014043 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk"
Dec 06 15:36:54 crc kubenswrapper[5003]: I1206 15:36:54.020573 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd" event={"ID":"010a07de-7eee-4d38-b3b9-6880f1d9c622","Type":"ContainerStarted","Data":"b78a716d1f8be0db0bfe3c4fec9e607646fe3b130b3ed382a456cca785918795"}
Dec 06 15:36:54 crc kubenswrapper[5003]: I1206 15:36:54.020607 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd" event={"ID":"010a07de-7eee-4d38-b3b9-6880f1d9c622","Type":"ContainerStarted","Data":"3f563840ba658200881c68dce3c43901b011f28ad1e778fd694d42524a0ce1a9"}
Dec 06 15:36:54 crc kubenswrapper[5003]: I1206 15:36:54.020976 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:54 crc kubenswrapper[5003]: I1206 15:36:54.026618 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd"
Dec 06 15:36:54 crc kubenswrapper[5003]: I1206 15:36:54.045612 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-5cf8f9f8d-hv2pd" podStartSLOduration=65.04559531 podStartE2EDuration="1m5.04559531s" podCreationTimestamp="2025-12-06 15:35:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:36:54.044238782 +0000 UTC m=+292.577593183" watchObservedRunningTime="2025-12-06 15:36:54.04559531 +0000 UTC m=+292.578949691"
Dec 06 15:36:54 crc kubenswrapper[5003]: I1206 15:36:54.077224 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls"
Dec 06 15:36:54 crc kubenswrapper[5003]: I1206 15:36:54.374055 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Dec 06 15:36:54 crc kubenswrapper[5003]: I1206 15:36:54.427288 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Dec 06 15:36:54 crc kubenswrapper[5003]: I1206 15:36:54.528835 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config"
Dec 06 15:36:54 crc kubenswrapper[5003]: I1206 15:36:54.538593 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv"
Dec 06 15:36:54 crc kubenswrapper[5003]: I1206 15:36:54.605557 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Dec 06 15:36:54 crc kubenswrapper[5003]: I1206 15:36:54.719177 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx"
Dec 06 15:36:54 crc kubenswrapper[5003]: I1206 15:36:54.840844 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86"
Dec 06 15:36:54 crc kubenswrapper[5003]: I1206 15:36:54.908266 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca"
Dec 06 15:36:54 crc kubenswrapper[5003]: I1206 15:36:54.936936 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Dec 06 15:36:54 crc kubenswrapper[5003]: I1206 15:36:54.940848 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert"
Dec 06 15:36:55 crc kubenswrapper[5003]: I1206 15:36:55.359283 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert"
Dec 06 15:36:55 crc kubenswrapper[5003]: I1206 15:36:55.362835 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Dec 06 15:36:55 crc kubenswrapper[5003]: I1206 15:36:55.628138 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca"
Dec 06 15:36:55 crc kubenswrapper[5003]: I1206 15:36:55.726654 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5"
Dec 06 15:36:55 crc kubenswrapper[5003]: I1206 15:36:55.932071 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Dec 06 15:36:56 crc kubenswrapper[5003]: I1206 15:36:56.525677 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Dec 06 15:36:56 crc kubenswrapper[5003]: I1206 15:36:56.719665 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1"
Dec 06 15:36:56 crc kubenswrapper[5003]: I1206 15:36:56.896684 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt"
Dec 06 15:36:57 crc kubenswrapper[5003]: I1206
15:36:57.043129 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 06 15:36:57 crc kubenswrapper[5003]: I1206 15:36:57.043226 5003 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="a2b6b0947f32c0a44ee1fb4927ff98d8f411909f06c4abcf03fcbd5b70180311" exitCode=137 Dec 06 15:36:57 crc kubenswrapper[5003]: I1206 15:36:57.272946 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 06 15:36:57 crc kubenswrapper[5003]: I1206 15:36:57.464700 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 06 15:36:57 crc kubenswrapper[5003]: I1206 15:36:57.464784 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 06 15:36:57 crc kubenswrapper[5003]: I1206 15:36:57.495010 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 06 15:36:57 crc kubenswrapper[5003]: I1206 15:36:57.495077 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 06 15:36:57 crc kubenswrapper[5003]: I1206 15:36:57.495094 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 15:36:57 crc kubenswrapper[5003]: I1206 15:36:57.495116 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 06 15:36:57 crc kubenswrapper[5003]: I1206 15:36:57.495139 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 06 15:36:57 crc kubenswrapper[5003]: I1206 15:36:57.495157 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 06 15:36:57 crc kubenswrapper[5003]: I1206 15:36:57.495205 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 15:36:57 crc kubenswrapper[5003]: I1206 15:36:57.495225 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 15:36:57 crc kubenswrapper[5003]: I1206 15:36:57.495279 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 15:36:57 crc kubenswrapper[5003]: I1206 15:36:57.495368 5003 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 06 15:36:57 crc kubenswrapper[5003]: I1206 15:36:57.495382 5003 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 06 15:36:57 crc kubenswrapper[5003]: I1206 15:36:57.495390 5003 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 06 15:36:57 crc kubenswrapper[5003]: I1206 15:36:57.495399 5003 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 06 15:36:57 crc kubenswrapper[5003]: I1206 15:36:57.502594 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 15:36:57 crc kubenswrapper[5003]: I1206 15:36:57.596276 5003 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 06 15:36:57 crc kubenswrapper[5003]: I1206 15:36:57.719627 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 06 15:36:57 crc kubenswrapper[5003]: I1206 15:36:57.719883 5003 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Dec 06 15:36:57 crc kubenswrapper[5003]: I1206 15:36:57.734623 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 06 15:36:57 crc kubenswrapper[5003]: I1206 15:36:57.734674 5003 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="faec5933-47a8-449a-af4d-9143ac78c1a9" Dec 06 15:36:57 crc kubenswrapper[5003]: I1206 15:36:57.741880 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 06 15:36:57 crc kubenswrapper[5003]: I1206 15:36:57.741906 5003 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="faec5933-47a8-449a-af4d-9143ac78c1a9" Dec 06 15:36:57 crc kubenswrapper[5003]: I1206 15:36:57.981326 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 06 15:36:58 crc kubenswrapper[5003]: I1206 15:36:58.054046 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 06 15:36:58 crc kubenswrapper[5003]: I1206 15:36:58.054160 5003 scope.go:117] "RemoveContainer" containerID="a2b6b0947f32c0a44ee1fb4927ff98d8f411909f06c4abcf03fcbd5b70180311" Dec 06 15:36:58 crc kubenswrapper[5003]: I1206 15:36:58.054279 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 06 15:37:01 crc kubenswrapper[5003]: I1206 15:37:01.599584 5003 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Dec 06 15:37:10 crc kubenswrapper[5003]: I1206 15:37:10.640463 5003 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-lwc4r container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.29:8080/healthz\": dial tcp 10.217.0.29:8080: connect: connection refused" start-of-body= Dec 06 15:37:10 crc kubenswrapper[5003]: I1206 15:37:10.641330 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-lwc4r" podUID="765bb4a4-7c41-414b-a9be-a54be49b76ff" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.29:8080/healthz\": dial tcp 10.217.0.29:8080: connect: connection refused" Dec 06 15:37:10 crc kubenswrapper[5003]: I1206 15:37:10.640531 5003 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-lwc4r container/marketplace-operator namespace/openshift-marketplace: Liveness probe status=failure output="Get \"http://10.217.0.29:8080/healthz\": dial tcp 10.217.0.29:8080: connect: connection refused" start-of-body= Dec 06 15:37:10 crc kubenswrapper[5003]: I1206 15:37:10.642102 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-marketplace/marketplace-operator-79b997595-lwc4r" podUID="765bb4a4-7c41-414b-a9be-a54be49b76ff" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.29:8080/healthz\": dial tcp 10.217.0.29:8080: connect: connection refused" Dec 06 15:37:11 crc kubenswrapper[5003]: I1206 15:37:11.122110 5003 generic.go:334] "Generic (PLEG): container finished" podID="765bb4a4-7c41-414b-a9be-a54be49b76ff" containerID="8983f81f3e009e763099d5f2f8745d5e96adc95d452468981c722677ee3c4f9d" exitCode=0 Dec 06 15:37:11 crc kubenswrapper[5003]: I1206 15:37:11.122160 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-lwc4r" event={"ID":"765bb4a4-7c41-414b-a9be-a54be49b76ff","Type":"ContainerDied","Data":"8983f81f3e009e763099d5f2f8745d5e96adc95d452468981c722677ee3c4f9d"} Dec 06 15:37:11 crc kubenswrapper[5003]: I1206 15:37:11.122693 5003 scope.go:117] "RemoveContainer" containerID="8983f81f3e009e763099d5f2f8745d5e96adc95d452468981c722677ee3c4f9d" Dec 06 15:37:12 crc kubenswrapper[5003]: I1206 15:37:12.132035 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-lwc4r" event={"ID":"765bb4a4-7c41-414b-a9be-a54be49b76ff","Type":"ContainerStarted","Data":"980c7720cf8d72efde230240c7082200502a541160ff9e312d89bce6e9dae4b0"} Dec 06 15:37:12 crc kubenswrapper[5003]: I1206 15:37:12.132649 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-lwc4r" Dec 06 15:37:12 crc kubenswrapper[5003]: I1206 15:37:12.136258 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-lwc4r" Dec 06 15:37:23 crc kubenswrapper[5003]: I1206 15:37:23.200451 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Dec 06 15:37:23 crc 
kubenswrapper[5003]: I1206 15:37:23.202407 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 06 15:37:23 crc kubenswrapper[5003]: I1206 15:37:23.202449 5003 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="4f874ed8c60f2fb82c96596812bcd8d2706b8e179532086b790372da4104b8bd" exitCode=137 Dec 06 15:37:23 crc kubenswrapper[5003]: I1206 15:37:23.202513 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"4f874ed8c60f2fb82c96596812bcd8d2706b8e179532086b790372da4104b8bd"} Dec 06 15:37:23 crc kubenswrapper[5003]: I1206 15:37:23.202579 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"cbf4c6865c34edab0c2bb1f2ea30610ce785cbd66336946fd7024b82fc73f80c"} Dec 06 15:37:23 crc kubenswrapper[5003]: I1206 15:37:23.202598 5003 scope.go:117] "RemoveContainer" containerID="6f5d5d47cea1d06df2f36a0a15126c2419e5051d84842c781921577bec3c65f1" Dec 06 15:37:24 crc kubenswrapper[5003]: I1206 15:37:24.210110 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Dec 06 15:37:25 crc kubenswrapper[5003]: I1206 15:37:25.364473 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 06 15:37:32 crc kubenswrapper[5003]: I1206 15:37:32.523550 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 06 15:37:32 crc kubenswrapper[5003]: I1206 15:37:32.528825 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 06 15:37:33 crc kubenswrapper[5003]: I1206 15:37:33.265998 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 06 15:37:41 crc kubenswrapper[5003]: I1206 15:37:41.605854 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-g9t4q"] Dec 06 15:37:41 crc kubenswrapper[5003]: I1206 15:37:41.606715 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g9t4q" podUID="9990e306-5ae4-467e-9cc4-0225f9c05fc7" containerName="route-controller-manager" containerID="cri-o://db91c2a9628a0aad0aa0e7a1210c5736a4dabe6ac52cb6eb2ee1b064eabb752f" gracePeriod=30 Dec 06 15:37:41 crc kubenswrapper[5003]: I1206 15:37:41.634668 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-78q2b"] Dec 06 15:37:41 crc kubenswrapper[5003]: I1206 15:37:41.634936 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-78q2b" podUID="86ec403c-0ca1-43ee-893e-917c87e5e174" containerName="controller-manager" 
containerID="cri-o://b67120dc0ad9b400cd63bfd6ad74752fc397838509fea16a52428052ac145c9a" gracePeriod=30 Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.308105 5003 generic.go:334] "Generic (PLEG): container finished" podID="9990e306-5ae4-467e-9cc4-0225f9c05fc7" containerID="db91c2a9628a0aad0aa0e7a1210c5736a4dabe6ac52cb6eb2ee1b064eabb752f" exitCode=0 Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.308221 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g9t4q" event={"ID":"9990e306-5ae4-467e-9cc4-0225f9c05fc7","Type":"ContainerDied","Data":"db91c2a9628a0aad0aa0e7a1210c5736a4dabe6ac52cb6eb2ee1b064eabb752f"} Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.309806 5003 generic.go:334] "Generic (PLEG): container finished" podID="86ec403c-0ca1-43ee-893e-917c87e5e174" containerID="b67120dc0ad9b400cd63bfd6ad74752fc397838509fea16a52428052ac145c9a" exitCode=0 Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.309832 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-78q2b" event={"ID":"86ec403c-0ca1-43ee-893e-917c87e5e174","Type":"ContainerDied","Data":"b67120dc0ad9b400cd63bfd6ad74752fc397838509fea16a52428052ac145c9a"} Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.474779 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g9t4q" Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.549951 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-78q2b" Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.621962 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9990e306-5ae4-467e-9cc4-0225f9c05fc7-serving-cert\") pod \"9990e306-5ae4-467e-9cc4-0225f9c05fc7\" (UID: \"9990e306-5ae4-467e-9cc4-0225f9c05fc7\") " Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.622034 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s7szd\" (UniqueName: \"kubernetes.io/projected/9990e306-5ae4-467e-9cc4-0225f9c05fc7-kube-api-access-s7szd\") pod \"9990e306-5ae4-467e-9cc4-0225f9c05fc7\" (UID: \"9990e306-5ae4-467e-9cc4-0225f9c05fc7\") " Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.622074 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9990e306-5ae4-467e-9cc4-0225f9c05fc7-client-ca\") pod \"9990e306-5ae4-467e-9cc4-0225f9c05fc7\" (UID: \"9990e306-5ae4-467e-9cc4-0225f9c05fc7\") " Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.622131 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9990e306-5ae4-467e-9cc4-0225f9c05fc7-config\") pod \"9990e306-5ae4-467e-9cc4-0225f9c05fc7\" (UID: \"9990e306-5ae4-467e-9cc4-0225f9c05fc7\") " Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.622808 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9990e306-5ae4-467e-9cc4-0225f9c05fc7-client-ca" (OuterVolumeSpecName: "client-ca") pod "9990e306-5ae4-467e-9cc4-0225f9c05fc7" (UID: "9990e306-5ae4-467e-9cc4-0225f9c05fc7"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.623413 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9990e306-5ae4-467e-9cc4-0225f9c05fc7-config" (OuterVolumeSpecName: "config") pod "9990e306-5ae4-467e-9cc4-0225f9c05fc7" (UID: "9990e306-5ae4-467e-9cc4-0225f9c05fc7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.628509 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9990e306-5ae4-467e-9cc4-0225f9c05fc7-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9990e306-5ae4-467e-9cc4-0225f9c05fc7" (UID: "9990e306-5ae4-467e-9cc4-0225f9c05fc7"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.628626 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9990e306-5ae4-467e-9cc4-0225f9c05fc7-kube-api-access-s7szd" (OuterVolumeSpecName: "kube-api-access-s7szd") pod "9990e306-5ae4-467e-9cc4-0225f9c05fc7" (UID: "9990e306-5ae4-467e-9cc4-0225f9c05fc7"). InnerVolumeSpecName "kube-api-access-s7szd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.723201 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/86ec403c-0ca1-43ee-893e-917c87e5e174-proxy-ca-bundles\") pod \"86ec403c-0ca1-43ee-893e-917c87e5e174\" (UID: \"86ec403c-0ca1-43ee-893e-917c87e5e174\") " Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.723256 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wm9x4\" (UniqueName: \"kubernetes.io/projected/86ec403c-0ca1-43ee-893e-917c87e5e174-kube-api-access-wm9x4\") pod \"86ec403c-0ca1-43ee-893e-917c87e5e174\" (UID: \"86ec403c-0ca1-43ee-893e-917c87e5e174\") " Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.723301 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/86ec403c-0ca1-43ee-893e-917c87e5e174-client-ca\") pod \"86ec403c-0ca1-43ee-893e-917c87e5e174\" (UID: \"86ec403c-0ca1-43ee-893e-917c87e5e174\") " Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.723347 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86ec403c-0ca1-43ee-893e-917c87e5e174-config\") pod \"86ec403c-0ca1-43ee-893e-917c87e5e174\" (UID: \"86ec403c-0ca1-43ee-893e-917c87e5e174\") " Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.723407 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/86ec403c-0ca1-43ee-893e-917c87e5e174-serving-cert\") pod \"86ec403c-0ca1-43ee-893e-917c87e5e174\" (UID: \"86ec403c-0ca1-43ee-893e-917c87e5e174\") " Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.723614 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9990e306-5ae4-467e-9cc4-0225f9c05fc7-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.723624 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/9990e306-5ae4-467e-9cc4-0225f9c05fc7-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.723633 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s7szd\" (UniqueName: \"kubernetes.io/projected/9990e306-5ae4-467e-9cc4-0225f9c05fc7-kube-api-access-s7szd\") on node \"crc\" DevicePath \"\"" Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.723644 5003 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9990e306-5ae4-467e-9cc4-0225f9c05fc7-client-ca\") on node \"crc\" DevicePath \"\"" Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.724013 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86ec403c-0ca1-43ee-893e-917c87e5e174-client-ca" (OuterVolumeSpecName: "client-ca") pod "86ec403c-0ca1-43ee-893e-917c87e5e174" (UID: "86ec403c-0ca1-43ee-893e-917c87e5e174"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.724122 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86ec403c-0ca1-43ee-893e-917c87e5e174-config" (OuterVolumeSpecName: "config") pod "86ec403c-0ca1-43ee-893e-917c87e5e174" (UID: "86ec403c-0ca1-43ee-893e-917c87e5e174"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.724283 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86ec403c-0ca1-43ee-893e-917c87e5e174-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "86ec403c-0ca1-43ee-893e-917c87e5e174" (UID: "86ec403c-0ca1-43ee-893e-917c87e5e174"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.726916 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86ec403c-0ca1-43ee-893e-917c87e5e174-kube-api-access-wm9x4" (OuterVolumeSpecName: "kube-api-access-wm9x4") pod "86ec403c-0ca1-43ee-893e-917c87e5e174" (UID: "86ec403c-0ca1-43ee-893e-917c87e5e174"). InnerVolumeSpecName "kube-api-access-wm9x4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.727270 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86ec403c-0ca1-43ee-893e-917c87e5e174-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "86ec403c-0ca1-43ee-893e-917c87e5e174" (UID: "86ec403c-0ca1-43ee-893e-917c87e5e174"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.759117 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5cdc645687-t4hw4"] Dec 06 15:37:42 crc kubenswrapper[5003]: E1206 15:37:42.759370 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.759388 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 06 15:37:42 crc kubenswrapper[5003]: E1206 15:37:42.759406 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9990e306-5ae4-467e-9cc4-0225f9c05fc7" containerName="route-controller-manager" Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.759414 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="9990e306-5ae4-467e-9cc4-0225f9c05fc7" containerName="route-controller-manager" Dec 06 15:37:42 crc kubenswrapper[5003]: E1206 15:37:42.759426 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86ec403c-0ca1-43ee-893e-917c87e5e174" containerName="controller-manager" Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.759435 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="86ec403c-0ca1-43ee-893e-917c87e5e174" containerName="controller-manager" Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.759555 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.759574 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="9990e306-5ae4-467e-9cc4-0225f9c05fc7" containerName="route-controller-manager" Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.759584 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="86ec403c-0ca1-43ee-893e-917c87e5e174" containerName="controller-manager" Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.759983 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5cdc645687-t4hw4" Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.766640 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5cdc645687-t4hw4"] Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.824357 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/86ec403c-0ca1-43ee-893e-917c87e5e174-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.824392 5003 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/86ec403c-0ca1-43ee-893e-917c87e5e174-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.824405 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wm9x4\" (UniqueName: \"kubernetes.io/projected/86ec403c-0ca1-43ee-893e-917c87e5e174-kube-api-access-wm9x4\") on node \"crc\" DevicePath \"\"" Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.824414 5003 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/86ec403c-0ca1-43ee-893e-917c87e5e174-client-ca\") on node \"crc\" DevicePath \"\"" Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.824423 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86ec403c-0ca1-43ee-893e-917c87e5e174-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.925180 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82e86789-d814-41aa-9e8a-905e56aaed34-serving-cert\") pod \"controller-manager-5cdc645687-t4hw4\" (UID: \"82e86789-d814-41aa-9e8a-905e56aaed34\") " pod="openshift-controller-manager/controller-manager-5cdc645687-t4hw4" Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.925224 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lglk\" (UniqueName: \"kubernetes.io/projected/82e86789-d814-41aa-9e8a-905e56aaed34-kube-api-access-7lglk\") pod \"controller-manager-5cdc645687-t4hw4\" (UID: \"82e86789-d814-41aa-9e8a-905e56aaed34\") " pod="openshift-controller-manager/controller-manager-5cdc645687-t4hw4" Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.925411 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/82e86789-d814-41aa-9e8a-905e56aaed34-proxy-ca-bundles\") pod \"controller-manager-5cdc645687-t4hw4\" (UID: \"82e86789-d814-41aa-9e8a-905e56aaed34\") " pod="openshift-controller-manager/controller-manager-5cdc645687-t4hw4" Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.925465 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82e86789-d814-41aa-9e8a-905e56aaed34-config\") pod \"controller-manager-5cdc645687-t4hw4\" (UID: \"82e86789-d814-41aa-9e8a-905e56aaed34\") " pod="openshift-controller-manager/controller-manager-5cdc645687-t4hw4" Dec 06 15:37:42 crc kubenswrapper[5003]: I1206 15:37:42.925537 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" 
(UniqueName: \"kubernetes.io/configmap/82e86789-d814-41aa-9e8a-905e56aaed34-client-ca\") pod \"controller-manager-5cdc645687-t4hw4\" (UID: \"82e86789-d814-41aa-9e8a-905e56aaed34\") " pod="openshift-controller-manager/controller-manager-5cdc645687-t4hw4" Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.027003 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/82e86789-d814-41aa-9e8a-905e56aaed34-proxy-ca-bundles\") pod \"controller-manager-5cdc645687-t4hw4\" (UID: \"82e86789-d814-41aa-9e8a-905e56aaed34\") " pod="openshift-controller-manager/controller-manager-5cdc645687-t4hw4" Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.027066 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82e86789-d814-41aa-9e8a-905e56aaed34-config\") pod \"controller-manager-5cdc645687-t4hw4\" (UID: \"82e86789-d814-41aa-9e8a-905e56aaed34\") " pod="openshift-controller-manager/controller-manager-5cdc645687-t4hw4" Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.027086 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/82e86789-d814-41aa-9e8a-905e56aaed34-client-ca\") pod \"controller-manager-5cdc645687-t4hw4\" (UID: \"82e86789-d814-41aa-9e8a-905e56aaed34\") " pod="openshift-controller-manager/controller-manager-5cdc645687-t4hw4" Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.027121 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82e86789-d814-41aa-9e8a-905e56aaed34-serving-cert\") pod \"controller-manager-5cdc645687-t4hw4\" (UID: \"82e86789-d814-41aa-9e8a-905e56aaed34\") " pod="openshift-controller-manager/controller-manager-5cdc645687-t4hw4" Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.027149 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lglk\" (UniqueName: \"kubernetes.io/projected/82e86789-d814-41aa-9e8a-905e56aaed34-kube-api-access-7lglk\") pod \"controller-manager-5cdc645687-t4hw4\" (UID: \"82e86789-d814-41aa-9e8a-905e56aaed34\") " pod="openshift-controller-manager/controller-manager-5cdc645687-t4hw4" Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.028679 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/82e86789-d814-41aa-9e8a-905e56aaed34-proxy-ca-bundles\") pod \"controller-manager-5cdc645687-t4hw4\" (UID: \"82e86789-d814-41aa-9e8a-905e56aaed34\") " pod="openshift-controller-manager/controller-manager-5cdc645687-t4hw4" Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.028793 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/82e86789-d814-41aa-9e8a-905e56aaed34-client-ca\") pod \"controller-manager-5cdc645687-t4hw4\" (UID: \"82e86789-d814-41aa-9e8a-905e56aaed34\") " pod="openshift-controller-manager/controller-manager-5cdc645687-t4hw4" Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.030144 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82e86789-d814-41aa-9e8a-905e56aaed34-config\") pod \"controller-manager-5cdc645687-t4hw4\" (UID: \"82e86789-d814-41aa-9e8a-905e56aaed34\") " 
pod="openshift-controller-manager/controller-manager-5cdc645687-t4hw4" Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.032176 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82e86789-d814-41aa-9e8a-905e56aaed34-serving-cert\") pod \"controller-manager-5cdc645687-t4hw4\" (UID: \"82e86789-d814-41aa-9e8a-905e56aaed34\") " pod="openshift-controller-manager/controller-manager-5cdc645687-t4hw4" Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.049170 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7lglk\" (UniqueName: \"kubernetes.io/projected/82e86789-d814-41aa-9e8a-905e56aaed34-kube-api-access-7lglk\") pod \"controller-manager-5cdc645687-t4hw4\" (UID: \"82e86789-d814-41aa-9e8a-905e56aaed34\") " pod="openshift-controller-manager/controller-manager-5cdc645687-t4hw4" Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.077404 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5cdc645687-t4hw4" Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.296813 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5cdc645687-t4hw4"] Dec 06 15:37:43 crc kubenswrapper[5003]: W1206 15:37:43.298207 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod82e86789_d814_41aa_9e8a_905e56aaed34.slice/crio-9730c1b5c787ce2dabd281ed284f9c1ec71007d62fa26f27ae6a3e0cf6c40b16 WatchSource:0}: Error finding container 9730c1b5c787ce2dabd281ed284f9c1ec71007d62fa26f27ae6a3e0cf6c40b16: Status 404 returned error can't find the container with id 9730c1b5c787ce2dabd281ed284f9c1ec71007d62fa26f27ae6a3e0cf6c40b16 Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.318287 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g9t4q" event={"ID":"9990e306-5ae4-467e-9cc4-0225f9c05fc7","Type":"ContainerDied","Data":"68c6897432878829293ce16b7a4c79f075d060bf066194281f2f0a3bec09891f"} Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.318319 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g9t4q" Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.318354 5003 scope.go:117] "RemoveContainer" containerID="db91c2a9628a0aad0aa0e7a1210c5736a4dabe6ac52cb6eb2ee1b064eabb752f" Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.320400 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-78q2b" event={"ID":"86ec403c-0ca1-43ee-893e-917c87e5e174","Type":"ContainerDied","Data":"9b175169a625061d1d06bf03bbd22737d89a54457888ed71b6c18ee4eb6107cd"} Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.320542 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-78q2b" Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.321763 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5cdc645687-t4hw4" event={"ID":"82e86789-d814-41aa-9e8a-905e56aaed34","Type":"ContainerStarted","Data":"9730c1b5c787ce2dabd281ed284f9c1ec71007d62fa26f27ae6a3e0cf6c40b16"} Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.342085 5003 scope.go:117] "RemoveContainer" containerID="b67120dc0ad9b400cd63bfd6ad74752fc397838509fea16a52428052ac145c9a" Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.351517 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-g9t4q"] Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.354368 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-g9t4q"] Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.376614 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-78q2b"] Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.379605 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-78q2b"] Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.718576 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86ec403c-0ca1-43ee-893e-917c87e5e174" path="/var/lib/kubelet/pods/86ec403c-0ca1-43ee-893e-917c87e5e174/volumes" Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.719415 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9990e306-5ae4-467e-9cc4-0225f9c05fc7" path="/var/lib/kubelet/pods/9990e306-5ae4-467e-9cc4-0225f9c05fc7/volumes" Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.759302 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-576498df55-lft48"] Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.760077 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-576498df55-lft48" Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.761744 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.762367 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.762378 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.762970 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.764304 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.764924 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.771455 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-576498df55-lft48"] Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.939181 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0de3b75e-58bd-4473-b18c-7bbc41349671-config\") pod \"route-controller-manager-576498df55-lft48\" (UID: \"0de3b75e-58bd-4473-b18c-7bbc41349671\") " pod="openshift-route-controller-manager/route-controller-manager-576498df55-lft48" Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.939260 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0de3b75e-58bd-4473-b18c-7bbc41349671-client-ca\") pod \"route-controller-manager-576498df55-lft48\" (UID: \"0de3b75e-58bd-4473-b18c-7bbc41349671\") " pod="openshift-route-controller-manager/route-controller-manager-576498df55-lft48" Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.939293 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gz8rb\" (UniqueName: \"kubernetes.io/projected/0de3b75e-58bd-4473-b18c-7bbc41349671-kube-api-access-gz8rb\") pod \"route-controller-manager-576498df55-lft48\" (UID: \"0de3b75e-58bd-4473-b18c-7bbc41349671\") " pod="openshift-route-controller-manager/route-controller-manager-576498df55-lft48" Dec 06 15:37:43 crc kubenswrapper[5003]: I1206 15:37:43.939352 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0de3b75e-58bd-4473-b18c-7bbc41349671-serving-cert\") pod \"route-controller-manager-576498df55-lft48\" (UID: \"0de3b75e-58bd-4473-b18c-7bbc41349671\") " pod="openshift-route-controller-manager/route-controller-manager-576498df55-lft48" Dec 06 15:37:44 crc kubenswrapper[5003]: I1206 15:37:44.046527 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0de3b75e-58bd-4473-b18c-7bbc41349671-config\") pod 
\"route-controller-manager-576498df55-lft48\" (UID: \"0de3b75e-58bd-4473-b18c-7bbc41349671\") " pod="openshift-route-controller-manager/route-controller-manager-576498df55-lft48" Dec 06 15:37:44 crc kubenswrapper[5003]: I1206 15:37:44.046883 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0de3b75e-58bd-4473-b18c-7bbc41349671-client-ca\") pod \"route-controller-manager-576498df55-lft48\" (UID: \"0de3b75e-58bd-4473-b18c-7bbc41349671\") " pod="openshift-route-controller-manager/route-controller-manager-576498df55-lft48" Dec 06 15:37:44 crc kubenswrapper[5003]: I1206 15:37:44.047014 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gz8rb\" (UniqueName: \"kubernetes.io/projected/0de3b75e-58bd-4473-b18c-7bbc41349671-kube-api-access-gz8rb\") pod \"route-controller-manager-576498df55-lft48\" (UID: \"0de3b75e-58bd-4473-b18c-7bbc41349671\") " pod="openshift-route-controller-manager/route-controller-manager-576498df55-lft48" Dec 06 15:37:44 crc kubenswrapper[5003]: I1206 15:37:44.047158 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0de3b75e-58bd-4473-b18c-7bbc41349671-serving-cert\") pod \"route-controller-manager-576498df55-lft48\" (UID: \"0de3b75e-58bd-4473-b18c-7bbc41349671\") " pod="openshift-route-controller-manager/route-controller-manager-576498df55-lft48" Dec 06 15:37:44 crc kubenswrapper[5003]: I1206 15:37:44.047703 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0de3b75e-58bd-4473-b18c-7bbc41349671-client-ca\") pod \"route-controller-manager-576498df55-lft48\" (UID: \"0de3b75e-58bd-4473-b18c-7bbc41349671\") " pod="openshift-route-controller-manager/route-controller-manager-576498df55-lft48" Dec 06 15:37:44 crc kubenswrapper[5003]: I1206 15:37:44.047942 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0de3b75e-58bd-4473-b18c-7bbc41349671-config\") pod \"route-controller-manager-576498df55-lft48\" (UID: \"0de3b75e-58bd-4473-b18c-7bbc41349671\") " pod="openshift-route-controller-manager/route-controller-manager-576498df55-lft48" Dec 06 15:37:44 crc kubenswrapper[5003]: I1206 15:37:44.062264 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0de3b75e-58bd-4473-b18c-7bbc41349671-serving-cert\") pod \"route-controller-manager-576498df55-lft48\" (UID: \"0de3b75e-58bd-4473-b18c-7bbc41349671\") " pod="openshift-route-controller-manager/route-controller-manager-576498df55-lft48" Dec 06 15:37:44 crc kubenswrapper[5003]: I1206 15:37:44.068434 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gz8rb\" (UniqueName: \"kubernetes.io/projected/0de3b75e-58bd-4473-b18c-7bbc41349671-kube-api-access-gz8rb\") pod \"route-controller-manager-576498df55-lft48\" (UID: \"0de3b75e-58bd-4473-b18c-7bbc41349671\") " pod="openshift-route-controller-manager/route-controller-manager-576498df55-lft48" Dec 06 15:37:44 crc kubenswrapper[5003]: I1206 15:37:44.080439 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-576498df55-lft48" Dec 06 15:37:44 crc kubenswrapper[5003]: I1206 15:37:44.282342 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-576498df55-lft48"] Dec 06 15:37:44 crc kubenswrapper[5003]: I1206 15:37:44.358734 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-576498df55-lft48" event={"ID":"0de3b75e-58bd-4473-b18c-7bbc41349671","Type":"ContainerStarted","Data":"9e020985cab8ce62238d0cd4dfaf90bf5e7e5a8915c42c3adcde50468d8d2634"} Dec 06 15:37:44 crc kubenswrapper[5003]: I1206 15:37:44.395402 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5cdc645687-t4hw4" event={"ID":"82e86789-d814-41aa-9e8a-905e56aaed34","Type":"ContainerStarted","Data":"42699a6f98fdfad2595e1c567edc5a1837ef778a531a38d6c32bb1c3de9e9f25"} Dec 06 15:37:44 crc kubenswrapper[5003]: I1206 15:37:44.396957 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5cdc645687-t4hw4" Dec 06 15:37:44 crc kubenswrapper[5003]: I1206 15:37:44.413124 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5cdc645687-t4hw4" Dec 06 15:37:44 crc kubenswrapper[5003]: I1206 15:37:44.438156 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5cdc645687-t4hw4" podStartSLOduration=3.438132075 podStartE2EDuration="3.438132075s" podCreationTimestamp="2025-12-06 15:37:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:37:44.432748377 +0000 UTC m=+342.966102768" watchObservedRunningTime="2025-12-06 15:37:44.438132075 +0000 UTC m=+342.971486456" Dec 06 15:37:45 crc kubenswrapper[5003]: I1206 15:37:45.403445 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-576498df55-lft48" event={"ID":"0de3b75e-58bd-4473-b18c-7bbc41349671","Type":"ContainerStarted","Data":"47de94e911ec2a81e9fa45ade929eba7f7c9e800c1da88f92adef8624dd0954b"} Dec 06 15:37:45 crc kubenswrapper[5003]: I1206 15:37:45.422426 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-576498df55-lft48" podStartSLOduration=4.422408463 podStartE2EDuration="4.422408463s" podCreationTimestamp="2025-12-06 15:37:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:37:45.420504147 +0000 UTC m=+343.953858538" watchObservedRunningTime="2025-12-06 15:37:45.422408463 +0000 UTC m=+343.955762844" Dec 06 15:37:46 crc kubenswrapper[5003]: I1206 15:37:46.408091 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-576498df55-lft48" Dec 06 15:37:46 crc kubenswrapper[5003]: I1206 15:37:46.412968 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-576498df55-lft48" Dec 06 15:37:50 crc kubenswrapper[5003]: I1206 15:37:50.442550 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-controller-manager/controller-manager-5cdc645687-t4hw4"] Dec 06 15:37:50 crc kubenswrapper[5003]: I1206 15:37:50.443027 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-5cdc645687-t4hw4" podUID="82e86789-d814-41aa-9e8a-905e56aaed34" containerName="controller-manager" containerID="cri-o://42699a6f98fdfad2595e1c567edc5a1837ef778a531a38d6c32bb1c3de9e9f25" gracePeriod=30 Dec 06 15:37:50 crc kubenswrapper[5003]: I1206 15:37:50.463050 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-576498df55-lft48"] Dec 06 15:37:50 crc kubenswrapper[5003]: I1206 15:37:50.463274 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-576498df55-lft48" podUID="0de3b75e-58bd-4473-b18c-7bbc41349671" containerName="route-controller-manager" containerID="cri-o://47de94e911ec2a81e9fa45ade929eba7f7c9e800c1da88f92adef8624dd0954b" gracePeriod=30 Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.005282 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-576498df55-lft48" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.141116 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0de3b75e-58bd-4473-b18c-7bbc41349671-client-ca\") pod \"0de3b75e-58bd-4473-b18c-7bbc41349671\" (UID: \"0de3b75e-58bd-4473-b18c-7bbc41349671\") " Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.141440 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0de3b75e-58bd-4473-b18c-7bbc41349671-config\") pod \"0de3b75e-58bd-4473-b18c-7bbc41349671\" (UID: \"0de3b75e-58bd-4473-b18c-7bbc41349671\") " Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.141532 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0de3b75e-58bd-4473-b18c-7bbc41349671-serving-cert\") pod \"0de3b75e-58bd-4473-b18c-7bbc41349671\" (UID: \"0de3b75e-58bd-4473-b18c-7bbc41349671\") " Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.141663 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gz8rb\" (UniqueName: \"kubernetes.io/projected/0de3b75e-58bd-4473-b18c-7bbc41349671-kube-api-access-gz8rb\") pod \"0de3b75e-58bd-4473-b18c-7bbc41349671\" (UID: \"0de3b75e-58bd-4473-b18c-7bbc41349671\") " Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.141849 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0de3b75e-58bd-4473-b18c-7bbc41349671-client-ca" (OuterVolumeSpecName: "client-ca") pod "0de3b75e-58bd-4473-b18c-7bbc41349671" (UID: "0de3b75e-58bd-4473-b18c-7bbc41349671"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.141955 5003 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0de3b75e-58bd-4473-b18c-7bbc41349671-client-ca\") on node \"crc\" DevicePath \"\"" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.141966 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0de3b75e-58bd-4473-b18c-7bbc41349671-config" (OuterVolumeSpecName: "config") pod "0de3b75e-58bd-4473-b18c-7bbc41349671" (UID: "0de3b75e-58bd-4473-b18c-7bbc41349671"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.148667 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0de3b75e-58bd-4473-b18c-7bbc41349671-kube-api-access-gz8rb" (OuterVolumeSpecName: "kube-api-access-gz8rb") pod "0de3b75e-58bd-4473-b18c-7bbc41349671" (UID: "0de3b75e-58bd-4473-b18c-7bbc41349671"). InnerVolumeSpecName "kube-api-access-gz8rb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.164191 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0de3b75e-58bd-4473-b18c-7bbc41349671-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0de3b75e-58bd-4473-b18c-7bbc41349671" (UID: "0de3b75e-58bd-4473-b18c-7bbc41349671"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.242930 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0de3b75e-58bd-4473-b18c-7bbc41349671-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.242966 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0de3b75e-58bd-4473-b18c-7bbc41349671-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.242979 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gz8rb\" (UniqueName: \"kubernetes.io/projected/0de3b75e-58bd-4473-b18c-7bbc41349671-kube-api-access-gz8rb\") on node \"crc\" DevicePath \"\"" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.435560 5003 generic.go:334] "Generic (PLEG): container finished" podID="82e86789-d814-41aa-9e8a-905e56aaed34" containerID="42699a6f98fdfad2595e1c567edc5a1837ef778a531a38d6c32bb1c3de9e9f25" exitCode=0 Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.435653 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5cdc645687-t4hw4" event={"ID":"82e86789-d814-41aa-9e8a-905e56aaed34","Type":"ContainerDied","Data":"42699a6f98fdfad2595e1c567edc5a1837ef778a531a38d6c32bb1c3de9e9f25"} Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.437643 5003 generic.go:334] "Generic (PLEG): container finished" podID="0de3b75e-58bd-4473-b18c-7bbc41349671" containerID="47de94e911ec2a81e9fa45ade929eba7f7c9e800c1da88f92adef8624dd0954b" exitCode=0 Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.437699 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-576498df55-lft48" 
event={"ID":"0de3b75e-58bd-4473-b18c-7bbc41349671","Type":"ContainerDied","Data":"47de94e911ec2a81e9fa45ade929eba7f7c9e800c1da88f92adef8624dd0954b"} Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.437730 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-576498df55-lft48" event={"ID":"0de3b75e-58bd-4473-b18c-7bbc41349671","Type":"ContainerDied","Data":"9e020985cab8ce62238d0cd4dfaf90bf5e7e5a8915c42c3adcde50468d8d2634"} Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.437752 5003 scope.go:117] "RemoveContainer" containerID="47de94e911ec2a81e9fa45ade929eba7f7c9e800c1da88f92adef8624dd0954b" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.437893 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-576498df55-lft48" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.447291 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5cdc645687-t4hw4" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.454181 5003 scope.go:117] "RemoveContainer" containerID="47de94e911ec2a81e9fa45ade929eba7f7c9e800c1da88f92adef8624dd0954b" Dec 06 15:37:51 crc kubenswrapper[5003]: E1206 15:37:51.454536 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"47de94e911ec2a81e9fa45ade929eba7f7c9e800c1da88f92adef8624dd0954b\": container with ID starting with 47de94e911ec2a81e9fa45ade929eba7f7c9e800c1da88f92adef8624dd0954b not found: ID does not exist" containerID="47de94e911ec2a81e9fa45ade929eba7f7c9e800c1da88f92adef8624dd0954b" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.454582 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47de94e911ec2a81e9fa45ade929eba7f7c9e800c1da88f92adef8624dd0954b"} err="failed to get container status \"47de94e911ec2a81e9fa45ade929eba7f7c9e800c1da88f92adef8624dd0954b\": rpc error: code = NotFound desc = could not find container \"47de94e911ec2a81e9fa45ade929eba7f7c9e800c1da88f92adef8624dd0954b\": container with ID starting with 47de94e911ec2a81e9fa45ade929eba7f7c9e800c1da88f92adef8624dd0954b not found: ID does not exist" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.482170 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-576498df55-lft48"] Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.490849 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-576498df55-lft48"] Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.646818 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82e86789-d814-41aa-9e8a-905e56aaed34-serving-cert\") pod \"82e86789-d814-41aa-9e8a-905e56aaed34\" (UID: \"82e86789-d814-41aa-9e8a-905e56aaed34\") " Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.646881 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7lglk\" (UniqueName: \"kubernetes.io/projected/82e86789-d814-41aa-9e8a-905e56aaed34-kube-api-access-7lglk\") pod \"82e86789-d814-41aa-9e8a-905e56aaed34\" (UID: \"82e86789-d814-41aa-9e8a-905e56aaed34\") " Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.646983 5003 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82e86789-d814-41aa-9e8a-905e56aaed34-config\") pod \"82e86789-d814-41aa-9e8a-905e56aaed34\" (UID: \"82e86789-d814-41aa-9e8a-905e56aaed34\") " Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.647031 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/82e86789-d814-41aa-9e8a-905e56aaed34-proxy-ca-bundles\") pod \"82e86789-d814-41aa-9e8a-905e56aaed34\" (UID: \"82e86789-d814-41aa-9e8a-905e56aaed34\") " Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.647109 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/82e86789-d814-41aa-9e8a-905e56aaed34-client-ca\") pod \"82e86789-d814-41aa-9e8a-905e56aaed34\" (UID: \"82e86789-d814-41aa-9e8a-905e56aaed34\") " Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.648101 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82e86789-d814-41aa-9e8a-905e56aaed34-client-ca" (OuterVolumeSpecName: "client-ca") pod "82e86789-d814-41aa-9e8a-905e56aaed34" (UID: "82e86789-d814-41aa-9e8a-905e56aaed34"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.648599 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82e86789-d814-41aa-9e8a-905e56aaed34-config" (OuterVolumeSpecName: "config") pod "82e86789-d814-41aa-9e8a-905e56aaed34" (UID: "82e86789-d814-41aa-9e8a-905e56aaed34"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.649280 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82e86789-d814-41aa-9e8a-905e56aaed34-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "82e86789-d814-41aa-9e8a-905e56aaed34" (UID: "82e86789-d814-41aa-9e8a-905e56aaed34"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.651421 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82e86789-d814-41aa-9e8a-905e56aaed34-kube-api-access-7lglk" (OuterVolumeSpecName: "kube-api-access-7lglk") pod "82e86789-d814-41aa-9e8a-905e56aaed34" (UID: "82e86789-d814-41aa-9e8a-905e56aaed34"). InnerVolumeSpecName "kube-api-access-7lglk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.668373 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82e86789-d814-41aa-9e8a-905e56aaed34-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "82e86789-d814-41aa-9e8a-905e56aaed34" (UID: "82e86789-d814-41aa-9e8a-905e56aaed34"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.718015 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0de3b75e-58bd-4473-b18c-7bbc41349671" path="/var/lib/kubelet/pods/0de3b75e-58bd-4473-b18c-7bbc41349671/volumes" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.748946 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82e86789-d814-41aa-9e8a-905e56aaed34-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.748990 5003 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/82e86789-d814-41aa-9e8a-905e56aaed34-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.749003 5003 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/82e86789-d814-41aa-9e8a-905e56aaed34-client-ca\") on node \"crc\" DevicePath \"\"" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.749016 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82e86789-d814-41aa-9e8a-905e56aaed34-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.749028 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7lglk\" (UniqueName: \"kubernetes.io/projected/82e86789-d814-41aa-9e8a-905e56aaed34-kube-api-access-7lglk\") on node \"crc\" DevicePath \"\"" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.769198 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-d5469898f-2n2g6"] Dec 06 15:37:51 crc kubenswrapper[5003]: E1206 15:37:51.769469 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82e86789-d814-41aa-9e8a-905e56aaed34" containerName="controller-manager" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.769513 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="82e86789-d814-41aa-9e8a-905e56aaed34" containerName="controller-manager" Dec 06 15:37:51 crc kubenswrapper[5003]: E1206 15:37:51.769533 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0de3b75e-58bd-4473-b18c-7bbc41349671" containerName="route-controller-manager" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.769543 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="0de3b75e-58bd-4473-b18c-7bbc41349671" containerName="route-controller-manager" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.769670 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="82e86789-d814-41aa-9e8a-905e56aaed34" containerName="controller-manager" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.769697 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="0de3b75e-58bd-4473-b18c-7bbc41349671" containerName="route-controller-manager" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.770142 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-d5469898f-2n2g6" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.776827 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-85d49d74df-q57m7"] Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.778132 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-85d49d74df-q57m7" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.781607 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.781929 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.782259 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.782549 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.783179 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.784061 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.798442 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-d5469898f-2n2g6"] Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.803006 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-85d49d74df-q57m7"] Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.950959 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8862n\" (UniqueName: \"kubernetes.io/projected/b0111cb9-ae42-4dde-909d-e4b63a81e134-kube-api-access-8862n\") pod \"route-controller-manager-85d49d74df-q57m7\" (UID: \"b0111cb9-ae42-4dde-909d-e4b63a81e134\") " pod="openshift-route-controller-manager/route-controller-manager-85d49d74df-q57m7" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.951260 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b0111cb9-ae42-4dde-909d-e4b63a81e134-serving-cert\") pod \"route-controller-manager-85d49d74df-q57m7\" (UID: \"b0111cb9-ae42-4dde-909d-e4b63a81e134\") " pod="openshift-route-controller-manager/route-controller-manager-85d49d74df-q57m7" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.951384 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c333c360-f353-4c22-a612-fd29256636c2-serving-cert\") pod \"controller-manager-d5469898f-2n2g6\" (UID: \"c333c360-f353-4c22-a612-fd29256636c2\") " pod="openshift-controller-manager/controller-manager-d5469898f-2n2g6" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.951563 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c333c360-f353-4c22-a612-fd29256636c2-client-ca\") pod \"controller-manager-d5469898f-2n2g6\" (UID: \"c333c360-f353-4c22-a612-fd29256636c2\") " pod="openshift-controller-manager/controller-manager-d5469898f-2n2g6" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.951668 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwd2m\" (UniqueName: \"kubernetes.io/projected/c333c360-f353-4c22-a612-fd29256636c2-kube-api-access-zwd2m\") pod \"controller-manager-d5469898f-2n2g6\" (UID: \"c333c360-f353-4c22-a612-fd29256636c2\") " pod="openshift-controller-manager/controller-manager-d5469898f-2n2g6" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.951716 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b0111cb9-ae42-4dde-909d-e4b63a81e134-client-ca\") pod \"route-controller-manager-85d49d74df-q57m7\" (UID: \"b0111cb9-ae42-4dde-909d-e4b63a81e134\") " pod="openshift-route-controller-manager/route-controller-manager-85d49d74df-q57m7" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.951744 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c333c360-f353-4c22-a612-fd29256636c2-config\") pod \"controller-manager-d5469898f-2n2g6\" (UID: \"c333c360-f353-4c22-a612-fd29256636c2\") " pod="openshift-controller-manager/controller-manager-d5469898f-2n2g6" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.951767 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0111cb9-ae42-4dde-909d-e4b63a81e134-config\") pod \"route-controller-manager-85d49d74df-q57m7\" (UID: \"b0111cb9-ae42-4dde-909d-e4b63a81e134\") " pod="openshift-route-controller-manager/route-controller-manager-85d49d74df-q57m7" Dec 06 15:37:51 crc kubenswrapper[5003]: I1206 15:37:51.951784 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c333c360-f353-4c22-a612-fd29256636c2-proxy-ca-bundles\") pod \"controller-manager-d5469898f-2n2g6\" (UID: \"c333c360-f353-4c22-a612-fd29256636c2\") " pod="openshift-controller-manager/controller-manager-d5469898f-2n2g6" Dec 06 15:37:52 crc kubenswrapper[5003]: I1206 15:37:52.052922 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8862n\" (UniqueName: \"kubernetes.io/projected/b0111cb9-ae42-4dde-909d-e4b63a81e134-kube-api-access-8862n\") pod \"route-controller-manager-85d49d74df-q57m7\" (UID: \"b0111cb9-ae42-4dde-909d-e4b63a81e134\") " pod="openshift-route-controller-manager/route-controller-manager-85d49d74df-q57m7" Dec 06 15:37:52 crc kubenswrapper[5003]: I1206 15:37:52.052964 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b0111cb9-ae42-4dde-909d-e4b63a81e134-serving-cert\") pod \"route-controller-manager-85d49d74df-q57m7\" (UID: \"b0111cb9-ae42-4dde-909d-e4b63a81e134\") " pod="openshift-route-controller-manager/route-controller-manager-85d49d74df-q57m7" Dec 06 15:37:52 crc kubenswrapper[5003]: I1206 15:37:52.052982 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/c333c360-f353-4c22-a612-fd29256636c2-serving-cert\") pod \"controller-manager-d5469898f-2n2g6\" (UID: \"c333c360-f353-4c22-a612-fd29256636c2\") " pod="openshift-controller-manager/controller-manager-d5469898f-2n2g6" Dec 06 15:37:52 crc kubenswrapper[5003]: I1206 15:37:52.052998 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c333c360-f353-4c22-a612-fd29256636c2-client-ca\") pod \"controller-manager-d5469898f-2n2g6\" (UID: \"c333c360-f353-4c22-a612-fd29256636c2\") " pod="openshift-controller-manager/controller-manager-d5469898f-2n2g6" Dec 06 15:37:52 crc kubenswrapper[5003]: I1206 15:37:52.053029 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwd2m\" (UniqueName: \"kubernetes.io/projected/c333c360-f353-4c22-a612-fd29256636c2-kube-api-access-zwd2m\") pod \"controller-manager-d5469898f-2n2g6\" (UID: \"c333c360-f353-4c22-a612-fd29256636c2\") " pod="openshift-controller-manager/controller-manager-d5469898f-2n2g6" Dec 06 15:37:52 crc kubenswrapper[5003]: I1206 15:37:52.053053 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b0111cb9-ae42-4dde-909d-e4b63a81e134-client-ca\") pod \"route-controller-manager-85d49d74df-q57m7\" (UID: \"b0111cb9-ae42-4dde-909d-e4b63a81e134\") " pod="openshift-route-controller-manager/route-controller-manager-85d49d74df-q57m7" Dec 06 15:37:52 crc kubenswrapper[5003]: I1206 15:37:52.053070 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c333c360-f353-4c22-a612-fd29256636c2-config\") pod \"controller-manager-d5469898f-2n2g6\" (UID: \"c333c360-f353-4c22-a612-fd29256636c2\") " pod="openshift-controller-manager/controller-manager-d5469898f-2n2g6" Dec 06 15:37:52 crc kubenswrapper[5003]: I1206 15:37:52.053087 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0111cb9-ae42-4dde-909d-e4b63a81e134-config\") pod \"route-controller-manager-85d49d74df-q57m7\" (UID: \"b0111cb9-ae42-4dde-909d-e4b63a81e134\") " pod="openshift-route-controller-manager/route-controller-manager-85d49d74df-q57m7" Dec 06 15:37:52 crc kubenswrapper[5003]: I1206 15:37:52.053101 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c333c360-f353-4c22-a612-fd29256636c2-proxy-ca-bundles\") pod \"controller-manager-d5469898f-2n2g6\" (UID: \"c333c360-f353-4c22-a612-fd29256636c2\") " pod="openshift-controller-manager/controller-manager-d5469898f-2n2g6" Dec 06 15:37:52 crc kubenswrapper[5003]: I1206 15:37:52.054313 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c333c360-f353-4c22-a612-fd29256636c2-proxy-ca-bundles\") pod \"controller-manager-d5469898f-2n2g6\" (UID: \"c333c360-f353-4c22-a612-fd29256636c2\") " pod="openshift-controller-manager/controller-manager-d5469898f-2n2g6" Dec 06 15:37:52 crc kubenswrapper[5003]: I1206 15:37:52.054717 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c333c360-f353-4c22-a612-fd29256636c2-client-ca\") pod \"controller-manager-d5469898f-2n2g6\" (UID: \"c333c360-f353-4c22-a612-fd29256636c2\") " 
pod="openshift-controller-manager/controller-manager-d5469898f-2n2g6" Dec 06 15:37:52 crc kubenswrapper[5003]: I1206 15:37:52.054838 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0111cb9-ae42-4dde-909d-e4b63a81e134-config\") pod \"route-controller-manager-85d49d74df-q57m7\" (UID: \"b0111cb9-ae42-4dde-909d-e4b63a81e134\") " pod="openshift-route-controller-manager/route-controller-manager-85d49d74df-q57m7" Dec 06 15:37:52 crc kubenswrapper[5003]: I1206 15:37:52.055040 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c333c360-f353-4c22-a612-fd29256636c2-config\") pod \"controller-manager-d5469898f-2n2g6\" (UID: \"c333c360-f353-4c22-a612-fd29256636c2\") " pod="openshift-controller-manager/controller-manager-d5469898f-2n2g6" Dec 06 15:37:52 crc kubenswrapper[5003]: I1206 15:37:52.055157 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b0111cb9-ae42-4dde-909d-e4b63a81e134-client-ca\") pod \"route-controller-manager-85d49d74df-q57m7\" (UID: \"b0111cb9-ae42-4dde-909d-e4b63a81e134\") " pod="openshift-route-controller-manager/route-controller-manager-85d49d74df-q57m7" Dec 06 15:37:52 crc kubenswrapper[5003]: I1206 15:37:52.056367 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c333c360-f353-4c22-a612-fd29256636c2-serving-cert\") pod \"controller-manager-d5469898f-2n2g6\" (UID: \"c333c360-f353-4c22-a612-fd29256636c2\") " pod="openshift-controller-manager/controller-manager-d5469898f-2n2g6" Dec 06 15:37:52 crc kubenswrapper[5003]: I1206 15:37:52.060153 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b0111cb9-ae42-4dde-909d-e4b63a81e134-serving-cert\") pod \"route-controller-manager-85d49d74df-q57m7\" (UID: \"b0111cb9-ae42-4dde-909d-e4b63a81e134\") " pod="openshift-route-controller-manager/route-controller-manager-85d49d74df-q57m7" Dec 06 15:37:52 crc kubenswrapper[5003]: I1206 15:37:52.080248 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8862n\" (UniqueName: \"kubernetes.io/projected/b0111cb9-ae42-4dde-909d-e4b63a81e134-kube-api-access-8862n\") pod \"route-controller-manager-85d49d74df-q57m7\" (UID: \"b0111cb9-ae42-4dde-909d-e4b63a81e134\") " pod="openshift-route-controller-manager/route-controller-manager-85d49d74df-q57m7" Dec 06 15:37:52 crc kubenswrapper[5003]: I1206 15:37:52.080592 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwd2m\" (UniqueName: \"kubernetes.io/projected/c333c360-f353-4c22-a612-fd29256636c2-kube-api-access-zwd2m\") pod \"controller-manager-d5469898f-2n2g6\" (UID: \"c333c360-f353-4c22-a612-fd29256636c2\") " pod="openshift-controller-manager/controller-manager-d5469898f-2n2g6" Dec 06 15:37:52 crc kubenswrapper[5003]: I1206 15:37:52.100091 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-d5469898f-2n2g6" Dec 06 15:37:52 crc kubenswrapper[5003]: I1206 15:37:52.107539 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-85d49d74df-q57m7" Dec 06 15:37:52 crc kubenswrapper[5003]: I1206 15:37:52.372777 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-85d49d74df-q57m7"] Dec 06 15:37:52 crc kubenswrapper[5003]: I1206 15:37:52.402704 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-d5469898f-2n2g6"] Dec 06 15:37:52 crc kubenswrapper[5003]: I1206 15:37:52.443504 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-d5469898f-2n2g6" event={"ID":"c333c360-f353-4c22-a612-fd29256636c2","Type":"ContainerStarted","Data":"af5aa63fb738cee5c9285e8669ff05da15c68342e1dc34c6f73bc6cdfa7ccbb6"} Dec 06 15:37:52 crc kubenswrapper[5003]: I1206 15:37:52.448738 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-85d49d74df-q57m7" event={"ID":"b0111cb9-ae42-4dde-909d-e4b63a81e134","Type":"ContainerStarted","Data":"ee96d57181969fcb3152e5c7676136c7e67d4d8fd8c8ecb7e9d105a74be87556"} Dec 06 15:37:52 crc kubenswrapper[5003]: I1206 15:37:52.468236 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5cdc645687-t4hw4" event={"ID":"82e86789-d814-41aa-9e8a-905e56aaed34","Type":"ContainerDied","Data":"9730c1b5c787ce2dabd281ed284f9c1ec71007d62fa26f27ae6a3e0cf6c40b16"} Dec 06 15:37:52 crc kubenswrapper[5003]: I1206 15:37:52.468284 5003 scope.go:117] "RemoveContainer" containerID="42699a6f98fdfad2595e1c567edc5a1837ef778a531a38d6c32bb1c3de9e9f25" Dec 06 15:37:52 crc kubenswrapper[5003]: I1206 15:37:52.468423 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5cdc645687-t4hw4" Dec 06 15:37:52 crc kubenswrapper[5003]: I1206 15:37:52.509506 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5cdc645687-t4hw4"] Dec 06 15:37:52 crc kubenswrapper[5003]: I1206 15:37:52.517771 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-5cdc645687-t4hw4"] Dec 06 15:37:53 crc kubenswrapper[5003]: I1206 15:37:53.475146 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-d5469898f-2n2g6" event={"ID":"c333c360-f353-4c22-a612-fd29256636c2","Type":"ContainerStarted","Data":"c1ca7f61796b9aab6876f036059f25820ad94c376250231c9eda52af3465dc2e"} Dec 06 15:37:53 crc kubenswrapper[5003]: I1206 15:37:53.475561 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-d5469898f-2n2g6" Dec 06 15:37:53 crc kubenswrapper[5003]: I1206 15:37:53.476798 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-85d49d74df-q57m7" event={"ID":"b0111cb9-ae42-4dde-909d-e4b63a81e134","Type":"ContainerStarted","Data":"58705bcbf8a0a3b1dc2c86aabb3ae259016699ae9136a8a0ef61a92ebd9aa21d"} Dec 06 15:37:53 crc kubenswrapper[5003]: I1206 15:37:53.477165 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-85d49d74df-q57m7" Dec 06 15:37:53 crc kubenswrapper[5003]: I1206 15:37:53.479575 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-d5469898f-2n2g6" Dec 06 15:37:53 crc kubenswrapper[5003]: I1206 15:37:53.481407 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-85d49d74df-q57m7" Dec 06 15:37:53 crc kubenswrapper[5003]: I1206 15:37:53.511025 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-d5469898f-2n2g6" podStartSLOduration=3.51100628 podStartE2EDuration="3.51100628s" podCreationTimestamp="2025-12-06 15:37:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:37:53.495780626 +0000 UTC m=+352.029135027" watchObservedRunningTime="2025-12-06 15:37:53.51100628 +0000 UTC m=+352.044360671" Dec 06 15:37:53 crc kubenswrapper[5003]: I1206 15:37:53.511741 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-85d49d74df-q57m7" podStartSLOduration=3.511737031 podStartE2EDuration="3.511737031s" podCreationTimestamp="2025-12-06 15:37:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:37:53.508650321 +0000 UTC m=+352.042004722" watchObservedRunningTime="2025-12-06 15:37:53.511737031 +0000 UTC m=+352.045091412" Dec 06 15:37:53 crc kubenswrapper[5003]: I1206 15:37:53.719374 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82e86789-d814-41aa-9e8a-905e56aaed34" path="/var/lib/kubelet/pods/82e86789-d814-41aa-9e8a-905e56aaed34/volumes" Dec 06 15:38:00 crc kubenswrapper[5003]: I1206 15:38:00.441626 5003 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openshift-controller-manager/controller-manager-d5469898f-2n2g6"] Dec 06 15:38:00 crc kubenswrapper[5003]: I1206 15:38:00.442363 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-d5469898f-2n2g6" podUID="c333c360-f353-4c22-a612-fd29256636c2" containerName="controller-manager" containerID="cri-o://c1ca7f61796b9aab6876f036059f25820ad94c376250231c9eda52af3465dc2e" gracePeriod=30 Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.157174 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-d5469898f-2n2g6" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.306151 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c333c360-f353-4c22-a612-fd29256636c2-config\") pod \"c333c360-f353-4c22-a612-fd29256636c2\" (UID: \"c333c360-f353-4c22-a612-fd29256636c2\") " Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.306258 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zwd2m\" (UniqueName: \"kubernetes.io/projected/c333c360-f353-4c22-a612-fd29256636c2-kube-api-access-zwd2m\") pod \"c333c360-f353-4c22-a612-fd29256636c2\" (UID: \"c333c360-f353-4c22-a612-fd29256636c2\") " Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.306322 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c333c360-f353-4c22-a612-fd29256636c2-serving-cert\") pod \"c333c360-f353-4c22-a612-fd29256636c2\" (UID: \"c333c360-f353-4c22-a612-fd29256636c2\") " Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.306372 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c333c360-f353-4c22-a612-fd29256636c2-client-ca\") pod \"c333c360-f353-4c22-a612-fd29256636c2\" (UID: \"c333c360-f353-4c22-a612-fd29256636c2\") " Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.306405 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c333c360-f353-4c22-a612-fd29256636c2-proxy-ca-bundles\") pod \"c333c360-f353-4c22-a612-fd29256636c2\" (UID: \"c333c360-f353-4c22-a612-fd29256636c2\") " Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.307157 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c333c360-f353-4c22-a612-fd29256636c2-client-ca" (OuterVolumeSpecName: "client-ca") pod "c333c360-f353-4c22-a612-fd29256636c2" (UID: "c333c360-f353-4c22-a612-fd29256636c2"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.307193 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c333c360-f353-4c22-a612-fd29256636c2-config" (OuterVolumeSpecName: "config") pod "c333c360-f353-4c22-a612-fd29256636c2" (UID: "c333c360-f353-4c22-a612-fd29256636c2"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.307275 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c333c360-f353-4c22-a612-fd29256636c2-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "c333c360-f353-4c22-a612-fd29256636c2" (UID: "c333c360-f353-4c22-a612-fd29256636c2"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.312853 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c333c360-f353-4c22-a612-fd29256636c2-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "c333c360-f353-4c22-a612-fd29256636c2" (UID: "c333c360-f353-4c22-a612-fd29256636c2"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.313729 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c333c360-f353-4c22-a612-fd29256636c2-kube-api-access-zwd2m" (OuterVolumeSpecName: "kube-api-access-zwd2m") pod "c333c360-f353-4c22-a612-fd29256636c2" (UID: "c333c360-f353-4c22-a612-fd29256636c2"). InnerVolumeSpecName "kube-api-access-zwd2m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.407656 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c333c360-f353-4c22-a612-fd29256636c2-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.407698 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zwd2m\" (UniqueName: \"kubernetes.io/projected/c333c360-f353-4c22-a612-fd29256636c2-kube-api-access-zwd2m\") on node \"crc\" DevicePath \"\"" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.407709 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c333c360-f353-4c22-a612-fd29256636c2-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.407718 5003 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c333c360-f353-4c22-a612-fd29256636c2-client-ca\") on node \"crc\" DevicePath \"\"" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.407726 5003 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c333c360-f353-4c22-a612-fd29256636c2-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.520959 5003 generic.go:334] "Generic (PLEG): container finished" podID="c333c360-f353-4c22-a612-fd29256636c2" containerID="c1ca7f61796b9aab6876f036059f25820ad94c376250231c9eda52af3465dc2e" exitCode=0 Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.521012 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-d5469898f-2n2g6" event={"ID":"c333c360-f353-4c22-a612-fd29256636c2","Type":"ContainerDied","Data":"c1ca7f61796b9aab6876f036059f25820ad94c376250231c9eda52af3465dc2e"} Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.521043 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-d5469898f-2n2g6" 
event={"ID":"c333c360-f353-4c22-a612-fd29256636c2","Type":"ContainerDied","Data":"af5aa63fb738cee5c9285e8669ff05da15c68342e1dc34c6f73bc6cdfa7ccbb6"} Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.521065 5003 scope.go:117] "RemoveContainer" containerID="c1ca7f61796b9aab6876f036059f25820ad94c376250231c9eda52af3465dc2e" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.521200 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-d5469898f-2n2g6" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.544172 5003 scope.go:117] "RemoveContainer" containerID="c1ca7f61796b9aab6876f036059f25820ad94c376250231c9eda52af3465dc2e" Dec 06 15:38:01 crc kubenswrapper[5003]: E1206 15:38:01.544711 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c1ca7f61796b9aab6876f036059f25820ad94c376250231c9eda52af3465dc2e\": container with ID starting with c1ca7f61796b9aab6876f036059f25820ad94c376250231c9eda52af3465dc2e not found: ID does not exist" containerID="c1ca7f61796b9aab6876f036059f25820ad94c376250231c9eda52af3465dc2e" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.544754 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1ca7f61796b9aab6876f036059f25820ad94c376250231c9eda52af3465dc2e"} err="failed to get container status \"c1ca7f61796b9aab6876f036059f25820ad94c376250231c9eda52af3465dc2e\": rpc error: code = NotFound desc = could not find container \"c1ca7f61796b9aab6876f036059f25820ad94c376250231c9eda52af3465dc2e\": container with ID starting with c1ca7f61796b9aab6876f036059f25820ad94c376250231c9eda52af3465dc2e not found: ID does not exist" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.551541 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-d5469898f-2n2g6"] Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.558625 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-d5469898f-2n2g6"] Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.720607 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c333c360-f353-4c22-a612-fd29256636c2" path="/var/lib/kubelet/pods/c333c360-f353-4c22-a612-fd29256636c2/volumes" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.783849 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5cdc645687-56trj"] Dec 06 15:38:01 crc kubenswrapper[5003]: E1206 15:38:01.784125 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c333c360-f353-4c22-a612-fd29256636c2" containerName="controller-manager" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.784150 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="c333c360-f353-4c22-a612-fd29256636c2" containerName="controller-manager" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.784264 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="c333c360-f353-4c22-a612-fd29256636c2" containerName="controller-manager" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.784745 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5cdc645687-56trj" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.788540 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.789370 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.789452 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.789952 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.790069 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.797596 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.801130 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.812235 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/be806872-2222-40f1-ad1a-e1af34bc6d0a-proxy-ca-bundles\") pod \"controller-manager-5cdc645687-56trj\" (UID: \"be806872-2222-40f1-ad1a-e1af34bc6d0a\") " pod="openshift-controller-manager/controller-manager-5cdc645687-56trj" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.812319 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/be806872-2222-40f1-ad1a-e1af34bc6d0a-serving-cert\") pod \"controller-manager-5cdc645687-56trj\" (UID: \"be806872-2222-40f1-ad1a-e1af34bc6d0a\") " pod="openshift-controller-manager/controller-manager-5cdc645687-56trj" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.812353 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/be806872-2222-40f1-ad1a-e1af34bc6d0a-client-ca\") pod \"controller-manager-5cdc645687-56trj\" (UID: \"be806872-2222-40f1-ad1a-e1af34bc6d0a\") " pod="openshift-controller-manager/controller-manager-5cdc645687-56trj" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.812377 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/be806872-2222-40f1-ad1a-e1af34bc6d0a-config\") pod \"controller-manager-5cdc645687-56trj\" (UID: \"be806872-2222-40f1-ad1a-e1af34bc6d0a\") " pod="openshift-controller-manager/controller-manager-5cdc645687-56trj" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.812405 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvd45\" (UniqueName: \"kubernetes.io/projected/be806872-2222-40f1-ad1a-e1af34bc6d0a-kube-api-access-cvd45\") pod \"controller-manager-5cdc645687-56trj\" (UID: \"be806872-2222-40f1-ad1a-e1af34bc6d0a\") " 
pod="openshift-controller-manager/controller-manager-5cdc645687-56trj" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.839819 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5cdc645687-56trj"] Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.913067 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/be806872-2222-40f1-ad1a-e1af34bc6d0a-proxy-ca-bundles\") pod \"controller-manager-5cdc645687-56trj\" (UID: \"be806872-2222-40f1-ad1a-e1af34bc6d0a\") " pod="openshift-controller-manager/controller-manager-5cdc645687-56trj" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.913137 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/be806872-2222-40f1-ad1a-e1af34bc6d0a-serving-cert\") pod \"controller-manager-5cdc645687-56trj\" (UID: \"be806872-2222-40f1-ad1a-e1af34bc6d0a\") " pod="openshift-controller-manager/controller-manager-5cdc645687-56trj" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.913182 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/be806872-2222-40f1-ad1a-e1af34bc6d0a-client-ca\") pod \"controller-manager-5cdc645687-56trj\" (UID: \"be806872-2222-40f1-ad1a-e1af34bc6d0a\") " pod="openshift-controller-manager/controller-manager-5cdc645687-56trj" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.913204 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/be806872-2222-40f1-ad1a-e1af34bc6d0a-config\") pod \"controller-manager-5cdc645687-56trj\" (UID: \"be806872-2222-40f1-ad1a-e1af34bc6d0a\") " pod="openshift-controller-manager/controller-manager-5cdc645687-56trj" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.913235 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvd45\" (UniqueName: \"kubernetes.io/projected/be806872-2222-40f1-ad1a-e1af34bc6d0a-kube-api-access-cvd45\") pod \"controller-manager-5cdc645687-56trj\" (UID: \"be806872-2222-40f1-ad1a-e1af34bc6d0a\") " pod="openshift-controller-manager/controller-manager-5cdc645687-56trj" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.914152 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/be806872-2222-40f1-ad1a-e1af34bc6d0a-client-ca\") pod \"controller-manager-5cdc645687-56trj\" (UID: \"be806872-2222-40f1-ad1a-e1af34bc6d0a\") " pod="openshift-controller-manager/controller-manager-5cdc645687-56trj" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.914259 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/be806872-2222-40f1-ad1a-e1af34bc6d0a-proxy-ca-bundles\") pod \"controller-manager-5cdc645687-56trj\" (UID: \"be806872-2222-40f1-ad1a-e1af34bc6d0a\") " pod="openshift-controller-manager/controller-manager-5cdc645687-56trj" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.915112 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/be806872-2222-40f1-ad1a-e1af34bc6d0a-config\") pod \"controller-manager-5cdc645687-56trj\" (UID: \"be806872-2222-40f1-ad1a-e1af34bc6d0a\") " 
pod="openshift-controller-manager/controller-manager-5cdc645687-56trj" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.917144 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/be806872-2222-40f1-ad1a-e1af34bc6d0a-serving-cert\") pod \"controller-manager-5cdc645687-56trj\" (UID: \"be806872-2222-40f1-ad1a-e1af34bc6d0a\") " pod="openshift-controller-manager/controller-manager-5cdc645687-56trj" Dec 06 15:38:01 crc kubenswrapper[5003]: I1206 15:38:01.935063 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvd45\" (UniqueName: \"kubernetes.io/projected/be806872-2222-40f1-ad1a-e1af34bc6d0a-kube-api-access-cvd45\") pod \"controller-manager-5cdc645687-56trj\" (UID: \"be806872-2222-40f1-ad1a-e1af34bc6d0a\") " pod="openshift-controller-manager/controller-manager-5cdc645687-56trj" Dec 06 15:38:02 crc kubenswrapper[5003]: I1206 15:38:02.099068 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5cdc645687-56trj" Dec 06 15:38:02 crc kubenswrapper[5003]: I1206 15:38:02.366873 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5cdc645687-56trj"] Dec 06 15:38:02 crc kubenswrapper[5003]: I1206 15:38:02.525752 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5cdc645687-56trj" event={"ID":"be806872-2222-40f1-ad1a-e1af34bc6d0a","Type":"ContainerStarted","Data":"439d028c21498f64f5774bcd782718581e27be18c690c22e06755a517ee5b937"} Dec 06 15:38:02 crc kubenswrapper[5003]: I1206 15:38:02.526059 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5cdc645687-56trj" event={"ID":"be806872-2222-40f1-ad1a-e1af34bc6d0a","Type":"ContainerStarted","Data":"73c5fcf5657a0f5655ea73ddee93aba7db24d38f29510376800f1cb281cd22f7"} Dec 06 15:38:02 crc kubenswrapper[5003]: I1206 15:38:02.526380 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5cdc645687-56trj" Dec 06 15:38:02 crc kubenswrapper[5003]: I1206 15:38:02.527351 5003 patch_prober.go:28] interesting pod/controller-manager-5cdc645687-56trj container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.62:8443/healthz\": dial tcp 10.217.0.62:8443: connect: connection refused" start-of-body= Dec 06 15:38:02 crc kubenswrapper[5003]: I1206 15:38:02.527396 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-5cdc645687-56trj" podUID="be806872-2222-40f1-ad1a-e1af34bc6d0a" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.62:8443/healthz\": dial tcp 10.217.0.62:8443: connect: connection refused" Dec 06 15:38:02 crc kubenswrapper[5003]: I1206 15:38:02.574181 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5cdc645687-56trj" podStartSLOduration=2.574163292 podStartE2EDuration="2.574163292s" podCreationTimestamp="2025-12-06 15:38:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:38:02.572752311 +0000 UTC m=+361.106106702" watchObservedRunningTime="2025-12-06 15:38:02.574163292 +0000 UTC m=+361.107517673" Dec 06 
15:38:03 crc kubenswrapper[5003]: I1206 15:38:03.534422 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5cdc645687-56trj" Dec 06 15:38:18 crc kubenswrapper[5003]: I1206 15:38:18.573184 5003 patch_prober.go:28] interesting pod/machine-config-daemon-w25db container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 15:38:18 crc kubenswrapper[5003]: I1206 15:38:18.573870 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 15:38:22 crc kubenswrapper[5003]: I1206 15:38:22.415403 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-bdwlv"] Dec 06 15:38:22 crc kubenswrapper[5003]: I1206 15:38:22.416541 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-bdwlv" Dec 06 15:38:22 crc kubenswrapper[5003]: I1206 15:38:22.448424 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-bdwlv"] Dec 06 15:38:22 crc kubenswrapper[5003]: I1206 15:38:22.589146 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f36ea96a-d34e-4470-89e3-fc99588e31ae-bound-sa-token\") pod \"image-registry-66df7c8f76-bdwlv\" (UID: \"f36ea96a-d34e-4470-89e3-fc99588e31ae\") " pod="openshift-image-registry/image-registry-66df7c8f76-bdwlv" Dec 06 15:38:22 crc kubenswrapper[5003]: I1206 15:38:22.589208 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/f36ea96a-d34e-4470-89e3-fc99588e31ae-registry-tls\") pod \"image-registry-66df7c8f76-bdwlv\" (UID: \"f36ea96a-d34e-4470-89e3-fc99588e31ae\") " pod="openshift-image-registry/image-registry-66df7c8f76-bdwlv" Dec 06 15:38:22 crc kubenswrapper[5003]: I1206 15:38:22.589258 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/f36ea96a-d34e-4470-89e3-fc99588e31ae-registry-certificates\") pod \"image-registry-66df7c8f76-bdwlv\" (UID: \"f36ea96a-d34e-4470-89e3-fc99588e31ae\") " pod="openshift-image-registry/image-registry-66df7c8f76-bdwlv" Dec 06 15:38:22 crc kubenswrapper[5003]: I1206 15:38:22.589281 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c62bw\" (UniqueName: \"kubernetes.io/projected/f36ea96a-d34e-4470-89e3-fc99588e31ae-kube-api-access-c62bw\") pod \"image-registry-66df7c8f76-bdwlv\" (UID: \"f36ea96a-d34e-4470-89e3-fc99588e31ae\") " pod="openshift-image-registry/image-registry-66df7c8f76-bdwlv" Dec 06 15:38:22 crc kubenswrapper[5003]: I1206 15:38:22.589300 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/f36ea96a-d34e-4470-89e3-fc99588e31ae-installation-pull-secrets\") pod 
\"image-registry-66df7c8f76-bdwlv\" (UID: \"f36ea96a-d34e-4470-89e3-fc99588e31ae\") " pod="openshift-image-registry/image-registry-66df7c8f76-bdwlv" Dec 06 15:38:22 crc kubenswrapper[5003]: I1206 15:38:22.589320 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/f36ea96a-d34e-4470-89e3-fc99588e31ae-ca-trust-extracted\") pod \"image-registry-66df7c8f76-bdwlv\" (UID: \"f36ea96a-d34e-4470-89e3-fc99588e31ae\") " pod="openshift-image-registry/image-registry-66df7c8f76-bdwlv" Dec 06 15:38:22 crc kubenswrapper[5003]: I1206 15:38:22.589335 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f36ea96a-d34e-4470-89e3-fc99588e31ae-trusted-ca\") pod \"image-registry-66df7c8f76-bdwlv\" (UID: \"f36ea96a-d34e-4470-89e3-fc99588e31ae\") " pod="openshift-image-registry/image-registry-66df7c8f76-bdwlv" Dec 06 15:38:22 crc kubenswrapper[5003]: I1206 15:38:22.589415 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-bdwlv\" (UID: \"f36ea96a-d34e-4470-89e3-fc99588e31ae\") " pod="openshift-image-registry/image-registry-66df7c8f76-bdwlv" Dec 06 15:38:22 crc kubenswrapper[5003]: I1206 15:38:22.617459 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-bdwlv\" (UID: \"f36ea96a-d34e-4470-89e3-fc99588e31ae\") " pod="openshift-image-registry/image-registry-66df7c8f76-bdwlv" Dec 06 15:38:22 crc kubenswrapper[5003]: I1206 15:38:22.690885 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/f36ea96a-d34e-4470-89e3-fc99588e31ae-registry-tls\") pod \"image-registry-66df7c8f76-bdwlv\" (UID: \"f36ea96a-d34e-4470-89e3-fc99588e31ae\") " pod="openshift-image-registry/image-registry-66df7c8f76-bdwlv" Dec 06 15:38:22 crc kubenswrapper[5003]: I1206 15:38:22.690957 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/f36ea96a-d34e-4470-89e3-fc99588e31ae-registry-certificates\") pod \"image-registry-66df7c8f76-bdwlv\" (UID: \"f36ea96a-d34e-4470-89e3-fc99588e31ae\") " pod="openshift-image-registry/image-registry-66df7c8f76-bdwlv" Dec 06 15:38:22 crc kubenswrapper[5003]: I1206 15:38:22.690988 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c62bw\" (UniqueName: \"kubernetes.io/projected/f36ea96a-d34e-4470-89e3-fc99588e31ae-kube-api-access-c62bw\") pod \"image-registry-66df7c8f76-bdwlv\" (UID: \"f36ea96a-d34e-4470-89e3-fc99588e31ae\") " pod="openshift-image-registry/image-registry-66df7c8f76-bdwlv" Dec 06 15:38:22 crc kubenswrapper[5003]: I1206 15:38:22.691008 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/f36ea96a-d34e-4470-89e3-fc99588e31ae-installation-pull-secrets\") pod \"image-registry-66df7c8f76-bdwlv\" (UID: \"f36ea96a-d34e-4470-89e3-fc99588e31ae\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-bdwlv" Dec 06 15:38:22 crc kubenswrapper[5003]: I1206 15:38:22.691025 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/f36ea96a-d34e-4470-89e3-fc99588e31ae-ca-trust-extracted\") pod \"image-registry-66df7c8f76-bdwlv\" (UID: \"f36ea96a-d34e-4470-89e3-fc99588e31ae\") " pod="openshift-image-registry/image-registry-66df7c8f76-bdwlv" Dec 06 15:38:22 crc kubenswrapper[5003]: I1206 15:38:22.691070 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f36ea96a-d34e-4470-89e3-fc99588e31ae-trusted-ca\") pod \"image-registry-66df7c8f76-bdwlv\" (UID: \"f36ea96a-d34e-4470-89e3-fc99588e31ae\") " pod="openshift-image-registry/image-registry-66df7c8f76-bdwlv" Dec 06 15:38:22 crc kubenswrapper[5003]: I1206 15:38:22.691109 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f36ea96a-d34e-4470-89e3-fc99588e31ae-bound-sa-token\") pod \"image-registry-66df7c8f76-bdwlv\" (UID: \"f36ea96a-d34e-4470-89e3-fc99588e31ae\") " pod="openshift-image-registry/image-registry-66df7c8f76-bdwlv" Dec 06 15:38:22 crc kubenswrapper[5003]: I1206 15:38:22.691924 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/f36ea96a-d34e-4470-89e3-fc99588e31ae-ca-trust-extracted\") pod \"image-registry-66df7c8f76-bdwlv\" (UID: \"f36ea96a-d34e-4470-89e3-fc99588e31ae\") " pod="openshift-image-registry/image-registry-66df7c8f76-bdwlv" Dec 06 15:38:22 crc kubenswrapper[5003]: I1206 15:38:22.693023 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/f36ea96a-d34e-4470-89e3-fc99588e31ae-registry-certificates\") pod \"image-registry-66df7c8f76-bdwlv\" (UID: \"f36ea96a-d34e-4470-89e3-fc99588e31ae\") " pod="openshift-image-registry/image-registry-66df7c8f76-bdwlv" Dec 06 15:38:22 crc kubenswrapper[5003]: I1206 15:38:22.693541 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f36ea96a-d34e-4470-89e3-fc99588e31ae-trusted-ca\") pod \"image-registry-66df7c8f76-bdwlv\" (UID: \"f36ea96a-d34e-4470-89e3-fc99588e31ae\") " pod="openshift-image-registry/image-registry-66df7c8f76-bdwlv" Dec 06 15:38:22 crc kubenswrapper[5003]: I1206 15:38:22.696685 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/f36ea96a-d34e-4470-89e3-fc99588e31ae-registry-tls\") pod \"image-registry-66df7c8f76-bdwlv\" (UID: \"f36ea96a-d34e-4470-89e3-fc99588e31ae\") " pod="openshift-image-registry/image-registry-66df7c8f76-bdwlv" Dec 06 15:38:22 crc kubenswrapper[5003]: I1206 15:38:22.698606 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/f36ea96a-d34e-4470-89e3-fc99588e31ae-installation-pull-secrets\") pod \"image-registry-66df7c8f76-bdwlv\" (UID: \"f36ea96a-d34e-4470-89e3-fc99588e31ae\") " pod="openshift-image-registry/image-registry-66df7c8f76-bdwlv" Dec 06 15:38:22 crc kubenswrapper[5003]: I1206 15:38:22.709137 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/f36ea96a-d34e-4470-89e3-fc99588e31ae-bound-sa-token\") pod \"image-registry-66df7c8f76-bdwlv\" (UID: \"f36ea96a-d34e-4470-89e3-fc99588e31ae\") " pod="openshift-image-registry/image-registry-66df7c8f76-bdwlv" Dec 06 15:38:22 crc kubenswrapper[5003]: I1206 15:38:22.711726 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c62bw\" (UniqueName: \"kubernetes.io/projected/f36ea96a-d34e-4470-89e3-fc99588e31ae-kube-api-access-c62bw\") pod \"image-registry-66df7c8f76-bdwlv\" (UID: \"f36ea96a-d34e-4470-89e3-fc99588e31ae\") " pod="openshift-image-registry/image-registry-66df7c8f76-bdwlv" Dec 06 15:38:22 crc kubenswrapper[5003]: I1206 15:38:22.742744 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-bdwlv" Dec 06 15:38:23 crc kubenswrapper[5003]: I1206 15:38:23.283314 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-bdwlv"] Dec 06 15:38:23 crc kubenswrapper[5003]: I1206 15:38:23.647017 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-bdwlv" event={"ID":"f36ea96a-d34e-4470-89e3-fc99588e31ae","Type":"ContainerStarted","Data":"5fab926efcf6ed7d31b2e4a7125a8c232369b9c6a617b1fe89148825c5e768d7"} Dec 06 15:38:23 crc kubenswrapper[5003]: I1206 15:38:23.648299 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-bdwlv" Dec 06 15:38:23 crc kubenswrapper[5003]: I1206 15:38:23.648372 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-bdwlv" event={"ID":"f36ea96a-d34e-4470-89e3-fc99588e31ae","Type":"ContainerStarted","Data":"ae2bd61dd5452b3dcd1453f4ae06cd5bb1f97c44e9e32de7b592b978597499ed"} Dec 06 15:38:23 crc kubenswrapper[5003]: I1206 15:38:23.663544 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-bdwlv" podStartSLOduration=1.663520957 podStartE2EDuration="1.663520957s" podCreationTimestamp="2025-12-06 15:38:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:38:23.661112407 +0000 UTC m=+382.194466808" watchObservedRunningTime="2025-12-06 15:38:23.663520957 +0000 UTC m=+382.196875338" Dec 06 15:38:32 crc kubenswrapper[5003]: I1206 15:38:32.992629 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-85d49d74df-q57m7"] Dec 06 15:38:32 crc kubenswrapper[5003]: I1206 15:38:32.993462 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-85d49d74df-q57m7" podUID="b0111cb9-ae42-4dde-909d-e4b63a81e134" containerName="route-controller-manager" containerID="cri-o://58705bcbf8a0a3b1dc2c86aabb3ae259016699ae9136a8a0ef61a92ebd9aa21d" gracePeriod=30 Dec 06 15:38:33 crc kubenswrapper[5003]: I1206 15:38:33.700195 5003 generic.go:334] "Generic (PLEG): container finished" podID="b0111cb9-ae42-4dde-909d-e4b63a81e134" containerID="58705bcbf8a0a3b1dc2c86aabb3ae259016699ae9136a8a0ef61a92ebd9aa21d" exitCode=0 Dec 06 15:38:33 crc kubenswrapper[5003]: I1206 15:38:33.700306 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-85d49d74df-q57m7" 
event={"ID":"b0111cb9-ae42-4dde-909d-e4b63a81e134","Type":"ContainerDied","Data":"58705bcbf8a0a3b1dc2c86aabb3ae259016699ae9136a8a0ef61a92ebd9aa21d"} Dec 06 15:38:33 crc kubenswrapper[5003]: I1206 15:38:33.953998 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-85d49d74df-q57m7" Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.051403 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0111cb9-ae42-4dde-909d-e4b63a81e134-config\") pod \"b0111cb9-ae42-4dde-909d-e4b63a81e134\" (UID: \"b0111cb9-ae42-4dde-909d-e4b63a81e134\") " Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.051507 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8862n\" (UniqueName: \"kubernetes.io/projected/b0111cb9-ae42-4dde-909d-e4b63a81e134-kube-api-access-8862n\") pod \"b0111cb9-ae42-4dde-909d-e4b63a81e134\" (UID: \"b0111cb9-ae42-4dde-909d-e4b63a81e134\") " Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.051528 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b0111cb9-ae42-4dde-909d-e4b63a81e134-serving-cert\") pod \"b0111cb9-ae42-4dde-909d-e4b63a81e134\" (UID: \"b0111cb9-ae42-4dde-909d-e4b63a81e134\") " Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.051546 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b0111cb9-ae42-4dde-909d-e4b63a81e134-client-ca\") pod \"b0111cb9-ae42-4dde-909d-e4b63a81e134\" (UID: \"b0111cb9-ae42-4dde-909d-e4b63a81e134\") " Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.052353 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b0111cb9-ae42-4dde-909d-e4b63a81e134-client-ca" (OuterVolumeSpecName: "client-ca") pod "b0111cb9-ae42-4dde-909d-e4b63a81e134" (UID: "b0111cb9-ae42-4dde-909d-e4b63a81e134"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.052398 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b0111cb9-ae42-4dde-909d-e4b63a81e134-config" (OuterVolumeSpecName: "config") pod "b0111cb9-ae42-4dde-909d-e4b63a81e134" (UID: "b0111cb9-ae42-4dde-909d-e4b63a81e134"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.057060 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0111cb9-ae42-4dde-909d-e4b63a81e134-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "b0111cb9-ae42-4dde-909d-e4b63a81e134" (UID: "b0111cb9-ae42-4dde-909d-e4b63a81e134"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.060795 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0111cb9-ae42-4dde-909d-e4b63a81e134-kube-api-access-8862n" (OuterVolumeSpecName: "kube-api-access-8862n") pod "b0111cb9-ae42-4dde-909d-e4b63a81e134" (UID: "b0111cb9-ae42-4dde-909d-e4b63a81e134"). InnerVolumeSpecName "kube-api-access-8862n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.152814 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0111cb9-ae42-4dde-909d-e4b63a81e134-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.152850 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8862n\" (UniqueName: \"kubernetes.io/projected/b0111cb9-ae42-4dde-909d-e4b63a81e134-kube-api-access-8862n\") on node \"crc\" DevicePath \"\"" Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.152864 5003 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b0111cb9-ae42-4dde-909d-e4b63a81e134-client-ca\") on node \"crc\" DevicePath \"\"" Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.152880 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b0111cb9-ae42-4dde-909d-e4b63a81e134-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.709878 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-85d49d74df-q57m7" event={"ID":"b0111cb9-ae42-4dde-909d-e4b63a81e134","Type":"ContainerDied","Data":"ee96d57181969fcb3152e5c7676136c7e67d4d8fd8c8ecb7e9d105a74be87556"} Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.709945 5003 scope.go:117] "RemoveContainer" containerID="58705bcbf8a0a3b1dc2c86aabb3ae259016699ae9136a8a0ef61a92ebd9aa21d" Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.710194 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-85d49d74df-q57m7" Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.752933 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-85d49d74df-q57m7"] Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.756742 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-85d49d74df-q57m7"] Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.804786 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-576498df55-2wlzq"] Dec 06 15:38:34 crc kubenswrapper[5003]: E1206 15:38:34.805054 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0111cb9-ae42-4dde-909d-e4b63a81e134" containerName="route-controller-manager" Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.805071 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0111cb9-ae42-4dde-909d-e4b63a81e134" containerName="route-controller-manager" Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.805196 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0111cb9-ae42-4dde-909d-e4b63a81e134" containerName="route-controller-manager" Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.805654 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-576498df55-2wlzq" Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.807720 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.808360 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.810809 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.810838 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.811059 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.811243 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.815911 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-576498df55-2wlzq"] Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.881151 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/744a91a5-3a43-4f87-a8f0-e0a4cf5222f0-serving-cert\") pod \"route-controller-manager-576498df55-2wlzq\" (UID: \"744a91a5-3a43-4f87-a8f0-e0a4cf5222f0\") " pod="openshift-route-controller-manager/route-controller-manager-576498df55-2wlzq" Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.881202 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/744a91a5-3a43-4f87-a8f0-e0a4cf5222f0-client-ca\") pod \"route-controller-manager-576498df55-2wlzq\" (UID: \"744a91a5-3a43-4f87-a8f0-e0a4cf5222f0\") " pod="openshift-route-controller-manager/route-controller-manager-576498df55-2wlzq" Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.881243 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/744a91a5-3a43-4f87-a8f0-e0a4cf5222f0-config\") pod \"route-controller-manager-576498df55-2wlzq\" (UID: \"744a91a5-3a43-4f87-a8f0-e0a4cf5222f0\") " pod="openshift-route-controller-manager/route-controller-manager-576498df55-2wlzq" Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.881269 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r9mvn\" (UniqueName: \"kubernetes.io/projected/744a91a5-3a43-4f87-a8f0-e0a4cf5222f0-kube-api-access-r9mvn\") pod \"route-controller-manager-576498df55-2wlzq\" (UID: \"744a91a5-3a43-4f87-a8f0-e0a4cf5222f0\") " pod="openshift-route-controller-manager/route-controller-manager-576498df55-2wlzq" Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.982733 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/744a91a5-3a43-4f87-a8f0-e0a4cf5222f0-serving-cert\") pod 
\"route-controller-manager-576498df55-2wlzq\" (UID: \"744a91a5-3a43-4f87-a8f0-e0a4cf5222f0\") " pod="openshift-route-controller-manager/route-controller-manager-576498df55-2wlzq" Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.983068 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/744a91a5-3a43-4f87-a8f0-e0a4cf5222f0-client-ca\") pod \"route-controller-manager-576498df55-2wlzq\" (UID: \"744a91a5-3a43-4f87-a8f0-e0a4cf5222f0\") " pod="openshift-route-controller-manager/route-controller-manager-576498df55-2wlzq" Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.983190 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/744a91a5-3a43-4f87-a8f0-e0a4cf5222f0-config\") pod \"route-controller-manager-576498df55-2wlzq\" (UID: \"744a91a5-3a43-4f87-a8f0-e0a4cf5222f0\") " pod="openshift-route-controller-manager/route-controller-manager-576498df55-2wlzq" Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.983231 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r9mvn\" (UniqueName: \"kubernetes.io/projected/744a91a5-3a43-4f87-a8f0-e0a4cf5222f0-kube-api-access-r9mvn\") pod \"route-controller-manager-576498df55-2wlzq\" (UID: \"744a91a5-3a43-4f87-a8f0-e0a4cf5222f0\") " pod="openshift-route-controller-manager/route-controller-manager-576498df55-2wlzq" Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.984410 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/744a91a5-3a43-4f87-a8f0-e0a4cf5222f0-config\") pod \"route-controller-manager-576498df55-2wlzq\" (UID: \"744a91a5-3a43-4f87-a8f0-e0a4cf5222f0\") " pod="openshift-route-controller-manager/route-controller-manager-576498df55-2wlzq" Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.985107 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/744a91a5-3a43-4f87-a8f0-e0a4cf5222f0-client-ca\") pod \"route-controller-manager-576498df55-2wlzq\" (UID: \"744a91a5-3a43-4f87-a8f0-e0a4cf5222f0\") " pod="openshift-route-controller-manager/route-controller-manager-576498df55-2wlzq" Dec 06 15:38:34 crc kubenswrapper[5003]: I1206 15:38:34.991384 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/744a91a5-3a43-4f87-a8f0-e0a4cf5222f0-serving-cert\") pod \"route-controller-manager-576498df55-2wlzq\" (UID: \"744a91a5-3a43-4f87-a8f0-e0a4cf5222f0\") " pod="openshift-route-controller-manager/route-controller-manager-576498df55-2wlzq" Dec 06 15:38:35 crc kubenswrapper[5003]: I1206 15:38:35.000092 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r9mvn\" (UniqueName: \"kubernetes.io/projected/744a91a5-3a43-4f87-a8f0-e0a4cf5222f0-kube-api-access-r9mvn\") pod \"route-controller-manager-576498df55-2wlzq\" (UID: \"744a91a5-3a43-4f87-a8f0-e0a4cf5222f0\") " pod="openshift-route-controller-manager/route-controller-manager-576498df55-2wlzq" Dec 06 15:38:35 crc kubenswrapper[5003]: I1206 15:38:35.203353 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-576498df55-2wlzq" Dec 06 15:38:35 crc kubenswrapper[5003]: I1206 15:38:35.617335 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-576498df55-2wlzq"] Dec 06 15:38:35 crc kubenswrapper[5003]: I1206 15:38:35.721182 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b0111cb9-ae42-4dde-909d-e4b63a81e134" path="/var/lib/kubelet/pods/b0111cb9-ae42-4dde-909d-e4b63a81e134/volumes" Dec 06 15:38:35 crc kubenswrapper[5003]: I1206 15:38:35.722904 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-576498df55-2wlzq" event={"ID":"744a91a5-3a43-4f87-a8f0-e0a4cf5222f0","Type":"ContainerStarted","Data":"8e6d43e57942610c11388661be574471f0647d87555d19da5a1c2555df471394"} Dec 06 15:38:36 crc kubenswrapper[5003]: I1206 15:38:36.729798 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-576498df55-2wlzq" event={"ID":"744a91a5-3a43-4f87-a8f0-e0a4cf5222f0","Type":"ContainerStarted","Data":"d97fff13b888c1a79dcce91706be932f292eba6b3ec73c0c06b577c644b2a5df"} Dec 06 15:38:36 crc kubenswrapper[5003]: I1206 15:38:36.730044 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-576498df55-2wlzq" Dec 06 15:38:36 crc kubenswrapper[5003]: I1206 15:38:36.734294 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-576498df55-2wlzq" Dec 06 15:38:36 crc kubenswrapper[5003]: I1206 15:38:36.750711 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-576498df55-2wlzq" podStartSLOduration=4.750689412 podStartE2EDuration="4.750689412s" podCreationTimestamp="2025-12-06 15:38:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:38:36.748383225 +0000 UTC m=+395.281737616" watchObservedRunningTime="2025-12-06 15:38:36.750689412 +0000 UTC m=+395.284043793" Dec 06 15:38:42 crc kubenswrapper[5003]: I1206 15:38:42.748633 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-bdwlv" Dec 06 15:38:42 crc kubenswrapper[5003]: I1206 15:38:42.809133 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-tvwvh"] Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.360130 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jqxj7"] Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.361061 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-jqxj7" podUID="dde2226a-d12b-4c3b-a396-cf72781488ca" containerName="registry-server" containerID="cri-o://e7a3ca9c9cad0d324fec3f7433c330d72ce5181cd384e7d96107998891afba2d" gracePeriod=30 Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.377416 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gtwdh"] Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.377730 5003 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/community-operators-gtwdh" podUID="6c5a30dc-06ca-435f-81b9-576f03f05a19" containerName="registry-server" containerID="cri-o://e4b5fb7a09cbb912f18b48dddb3dffa5249d76b83fcfc068a3c747f22b2389a3" gracePeriod=30 Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.387858 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-lwc4r"] Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.388107 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-lwc4r" podUID="765bb4a4-7c41-414b-a9be-a54be49b76ff" containerName="marketplace-operator" containerID="cri-o://980c7720cf8d72efde230240c7082200502a541160ff9e312d89bce6e9dae4b0" gracePeriod=30 Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.400989 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-b5rnp"] Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.401307 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-b5rnp" podUID="67506930-842d-411a-b032-26874042995d" containerName="registry-server" containerID="cri-o://4ecc296f8d469b13572f3088613c6f5307d365952b1d0bae6750bc39ee54f433" gracePeriod=30 Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.415328 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-5fqrn"] Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.416117 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-5fqrn" Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.418745 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mbw9t"] Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.419026 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-mbw9t" podUID="7370c672-28ad-4228-8285-c113c6675ba8" containerName="registry-server" containerID="cri-o://a6e05613c5b9fca2c80befa1b17e35f07d84b3f182346f87fdc15363be8df4f4" gracePeriod=30 Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.422019 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-5fqrn"] Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.572873 5003 patch_prober.go:28] interesting pod/machine-config-daemon-w25db container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.572942 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.597236 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4ae558d3-8724-4da4-bd37-89893945a2f3-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-5fqrn\" (UID: 
\"4ae558d3-8724-4da4-bd37-89893945a2f3\") " pod="openshift-marketplace/marketplace-operator-79b997595-5fqrn" Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.597305 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/4ae558d3-8724-4da4-bd37-89893945a2f3-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-5fqrn\" (UID: \"4ae558d3-8724-4da4-bd37-89893945a2f3\") " pod="openshift-marketplace/marketplace-operator-79b997595-5fqrn" Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.597404 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65h57\" (UniqueName: \"kubernetes.io/projected/4ae558d3-8724-4da4-bd37-89893945a2f3-kube-api-access-65h57\") pod \"marketplace-operator-79b997595-5fqrn\" (UID: \"4ae558d3-8724-4da4-bd37-89893945a2f3\") " pod="openshift-marketplace/marketplace-operator-79b997595-5fqrn" Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.699359 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65h57\" (UniqueName: \"kubernetes.io/projected/4ae558d3-8724-4da4-bd37-89893945a2f3-kube-api-access-65h57\") pod \"marketplace-operator-79b997595-5fqrn\" (UID: \"4ae558d3-8724-4da4-bd37-89893945a2f3\") " pod="openshift-marketplace/marketplace-operator-79b997595-5fqrn" Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.699454 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4ae558d3-8724-4da4-bd37-89893945a2f3-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-5fqrn\" (UID: \"4ae558d3-8724-4da4-bd37-89893945a2f3\") " pod="openshift-marketplace/marketplace-operator-79b997595-5fqrn" Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.699481 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/4ae558d3-8724-4da4-bd37-89893945a2f3-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-5fqrn\" (UID: \"4ae558d3-8724-4da4-bd37-89893945a2f3\") " pod="openshift-marketplace/marketplace-operator-79b997595-5fqrn" Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.702356 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4ae558d3-8724-4da4-bd37-89893945a2f3-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-5fqrn\" (UID: \"4ae558d3-8724-4da4-bd37-89893945a2f3\") " pod="openshift-marketplace/marketplace-operator-79b997595-5fqrn" Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.711559 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/4ae558d3-8724-4da4-bd37-89893945a2f3-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-5fqrn\" (UID: \"4ae558d3-8724-4da4-bd37-89893945a2f3\") " pod="openshift-marketplace/marketplace-operator-79b997595-5fqrn" Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.720773 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65h57\" (UniqueName: \"kubernetes.io/projected/4ae558d3-8724-4da4-bd37-89893945a2f3-kube-api-access-65h57\") pod \"marketplace-operator-79b997595-5fqrn\" (UID: \"4ae558d3-8724-4da4-bd37-89893945a2f3\") " 
pod="openshift-marketplace/marketplace-operator-79b997595-5fqrn" Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.761535 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-5fqrn" Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.826809 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jqxj7" Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.860233 5003 generic.go:334] "Generic (PLEG): container finished" podID="765bb4a4-7c41-414b-a9be-a54be49b76ff" containerID="980c7720cf8d72efde230240c7082200502a541160ff9e312d89bce6e9dae4b0" exitCode=0 Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.860354 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-lwc4r" event={"ID":"765bb4a4-7c41-414b-a9be-a54be49b76ff","Type":"ContainerDied","Data":"980c7720cf8d72efde230240c7082200502a541160ff9e312d89bce6e9dae4b0"} Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.860430 5003 scope.go:117] "RemoveContainer" containerID="8983f81f3e009e763099d5f2f8745d5e96adc95d452468981c722677ee3c4f9d" Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.863441 5003 generic.go:334] "Generic (PLEG): container finished" podID="67506930-842d-411a-b032-26874042995d" containerID="4ecc296f8d469b13572f3088613c6f5307d365952b1d0bae6750bc39ee54f433" exitCode=0 Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.863543 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b5rnp" event={"ID":"67506930-842d-411a-b032-26874042995d","Type":"ContainerDied","Data":"4ecc296f8d469b13572f3088613c6f5307d365952b1d0bae6750bc39ee54f433"} Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.865687 5003 generic.go:334] "Generic (PLEG): container finished" podID="6c5a30dc-06ca-435f-81b9-576f03f05a19" containerID="e4b5fb7a09cbb912f18b48dddb3dffa5249d76b83fcfc068a3c747f22b2389a3" exitCode=0 Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.865734 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gtwdh" event={"ID":"6c5a30dc-06ca-435f-81b9-576f03f05a19","Type":"ContainerDied","Data":"e4b5fb7a09cbb912f18b48dddb3dffa5249d76b83fcfc068a3c747f22b2389a3"} Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.867412 5003 generic.go:334] "Generic (PLEG): container finished" podID="dde2226a-d12b-4c3b-a396-cf72781488ca" containerID="e7a3ca9c9cad0d324fec3f7433c330d72ce5181cd384e7d96107998891afba2d" exitCode=0 Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.867443 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jqxj7" event={"ID":"dde2226a-d12b-4c3b-a396-cf72781488ca","Type":"ContainerDied","Data":"e7a3ca9c9cad0d324fec3f7433c330d72ce5181cd384e7d96107998891afba2d"} Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.867458 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jqxj7" event={"ID":"dde2226a-d12b-4c3b-a396-cf72781488ca","Type":"ContainerDied","Data":"1c87dfd637c4e8f0c657ef5290730c1845ce9b786c88f23f299ca18bfd7552a2"} Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.867538 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jqxj7" Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.872621 5003 generic.go:334] "Generic (PLEG): container finished" podID="7370c672-28ad-4228-8285-c113c6675ba8" containerID="a6e05613c5b9fca2c80befa1b17e35f07d84b3f182346f87fdc15363be8df4f4" exitCode=0 Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.872664 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mbw9t" event={"ID":"7370c672-28ad-4228-8285-c113c6675ba8","Type":"ContainerDied","Data":"a6e05613c5b9fca2c80befa1b17e35f07d84b3f182346f87fdc15363be8df4f4"} Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.903626 5003 scope.go:117] "RemoveContainer" containerID="e7a3ca9c9cad0d324fec3f7433c330d72ce5181cd384e7d96107998891afba2d" Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.916928 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b5rnp" Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.942263 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-lwc4r" Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.944595 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gtwdh" Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.951610 5003 scope.go:117] "RemoveContainer" containerID="ed8aaaf11b223ff3ddd1e723f46807be41455a65ce0c349a8c1d7ed2c322f105" Dec 06 15:38:48 crc kubenswrapper[5003]: I1206 15:38:48.967851 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mbw9t" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:48.996381 5003 scope.go:117] "RemoveContainer" containerID="458cfd98d616a9e7de2f58e0c0d6dfa5b09f40963ae8eb61770509f66f95f37f" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.007406 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dde2226a-d12b-4c3b-a396-cf72781488ca-utilities\") pod \"dde2226a-d12b-4c3b-a396-cf72781488ca\" (UID: \"dde2226a-d12b-4c3b-a396-cf72781488ca\") " Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.007551 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zw5mx\" (UniqueName: \"kubernetes.io/projected/dde2226a-d12b-4c3b-a396-cf72781488ca-kube-api-access-zw5mx\") pod \"dde2226a-d12b-4c3b-a396-cf72781488ca\" (UID: \"dde2226a-d12b-4c3b-a396-cf72781488ca\") " Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.007598 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dde2226a-d12b-4c3b-a396-cf72781488ca-catalog-content\") pod \"dde2226a-d12b-4c3b-a396-cf72781488ca\" (UID: \"dde2226a-d12b-4c3b-a396-cf72781488ca\") " Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.009106 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dde2226a-d12b-4c3b-a396-cf72781488ca-utilities" (OuterVolumeSpecName: "utilities") pod "dde2226a-d12b-4c3b-a396-cf72781488ca" (UID: "dde2226a-d12b-4c3b-a396-cf72781488ca"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.012810 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dde2226a-d12b-4c3b-a396-cf72781488ca-kube-api-access-zw5mx" (OuterVolumeSpecName: "kube-api-access-zw5mx") pod "dde2226a-d12b-4c3b-a396-cf72781488ca" (UID: "dde2226a-d12b-4c3b-a396-cf72781488ca"). InnerVolumeSpecName "kube-api-access-zw5mx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.025430 5003 scope.go:117] "RemoveContainer" containerID="e7a3ca9c9cad0d324fec3f7433c330d72ce5181cd384e7d96107998891afba2d" Dec 06 15:38:49 crc kubenswrapper[5003]: E1206 15:38:49.025928 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7a3ca9c9cad0d324fec3f7433c330d72ce5181cd384e7d96107998891afba2d\": container with ID starting with e7a3ca9c9cad0d324fec3f7433c330d72ce5181cd384e7d96107998891afba2d not found: ID does not exist" containerID="e7a3ca9c9cad0d324fec3f7433c330d72ce5181cd384e7d96107998891afba2d" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.025962 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7a3ca9c9cad0d324fec3f7433c330d72ce5181cd384e7d96107998891afba2d"} err="failed to get container status \"e7a3ca9c9cad0d324fec3f7433c330d72ce5181cd384e7d96107998891afba2d\": rpc error: code = NotFound desc = could not find container \"e7a3ca9c9cad0d324fec3f7433c330d72ce5181cd384e7d96107998891afba2d\": container with ID starting with e7a3ca9c9cad0d324fec3f7433c330d72ce5181cd384e7d96107998891afba2d not found: ID does not exist" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.025983 5003 scope.go:117] "RemoveContainer" containerID="ed8aaaf11b223ff3ddd1e723f46807be41455a65ce0c349a8c1d7ed2c322f105" Dec 06 15:38:49 crc kubenswrapper[5003]: E1206 15:38:49.026327 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed8aaaf11b223ff3ddd1e723f46807be41455a65ce0c349a8c1d7ed2c322f105\": container with ID starting with ed8aaaf11b223ff3ddd1e723f46807be41455a65ce0c349a8c1d7ed2c322f105 not found: ID does not exist" containerID="ed8aaaf11b223ff3ddd1e723f46807be41455a65ce0c349a8c1d7ed2c322f105" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.026351 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed8aaaf11b223ff3ddd1e723f46807be41455a65ce0c349a8c1d7ed2c322f105"} err="failed to get container status \"ed8aaaf11b223ff3ddd1e723f46807be41455a65ce0c349a8c1d7ed2c322f105\": rpc error: code = NotFound desc = could not find container \"ed8aaaf11b223ff3ddd1e723f46807be41455a65ce0c349a8c1d7ed2c322f105\": container with ID starting with ed8aaaf11b223ff3ddd1e723f46807be41455a65ce0c349a8c1d7ed2c322f105 not found: ID does not exist" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.026363 5003 scope.go:117] "RemoveContainer" containerID="458cfd98d616a9e7de2f58e0c0d6dfa5b09f40963ae8eb61770509f66f95f37f" Dec 06 15:38:49 crc kubenswrapper[5003]: E1206 15:38:49.026839 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"458cfd98d616a9e7de2f58e0c0d6dfa5b09f40963ae8eb61770509f66f95f37f\": container with ID starting with 458cfd98d616a9e7de2f58e0c0d6dfa5b09f40963ae8eb61770509f66f95f37f not found: ID does not 
exist" containerID="458cfd98d616a9e7de2f58e0c0d6dfa5b09f40963ae8eb61770509f66f95f37f" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.026865 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"458cfd98d616a9e7de2f58e0c0d6dfa5b09f40963ae8eb61770509f66f95f37f"} err="failed to get container status \"458cfd98d616a9e7de2f58e0c0d6dfa5b09f40963ae8eb61770509f66f95f37f\": rpc error: code = NotFound desc = could not find container \"458cfd98d616a9e7de2f58e0c0d6dfa5b09f40963ae8eb61770509f66f95f37f\": container with ID starting with 458cfd98d616a9e7de2f58e0c0d6dfa5b09f40963ae8eb61770509f66f95f37f not found: ID does not exist" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.069231 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dde2226a-d12b-4c3b-a396-cf72781488ca-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dde2226a-d12b-4c3b-a396-cf72781488ca" (UID: "dde2226a-d12b-4c3b-a396-cf72781488ca"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.109132 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qbncs\" (UniqueName: \"kubernetes.io/projected/7370c672-28ad-4228-8285-c113c6675ba8-kube-api-access-qbncs\") pod \"7370c672-28ad-4228-8285-c113c6675ba8\" (UID: \"7370c672-28ad-4228-8285-c113c6675ba8\") " Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.109180 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67506930-842d-411a-b032-26874042995d-utilities\") pod \"67506930-842d-411a-b032-26874042995d\" (UID: \"67506930-842d-411a-b032-26874042995d\") " Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.109209 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7370c672-28ad-4228-8285-c113c6675ba8-catalog-content\") pod \"7370c672-28ad-4228-8285-c113c6675ba8\" (UID: \"7370c672-28ad-4228-8285-c113c6675ba8\") " Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.109227 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cbz6m\" (UniqueName: \"kubernetes.io/projected/765bb4a4-7c41-414b-a9be-a54be49b76ff-kube-api-access-cbz6m\") pod \"765bb4a4-7c41-414b-a9be-a54be49b76ff\" (UID: \"765bb4a4-7c41-414b-a9be-a54be49b76ff\") " Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.109265 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7370c672-28ad-4228-8285-c113c6675ba8-utilities\") pod \"7370c672-28ad-4228-8285-c113c6675ba8\" (UID: \"7370c672-28ad-4228-8285-c113c6675ba8\") " Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.109282 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c5a30dc-06ca-435f-81b9-576f03f05a19-utilities\") pod \"6c5a30dc-06ca-435f-81b9-576f03f05a19\" (UID: \"6c5a30dc-06ca-435f-81b9-576f03f05a19\") " Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.109307 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w69jz\" (UniqueName: \"kubernetes.io/projected/67506930-842d-411a-b032-26874042995d-kube-api-access-w69jz\") pod 
\"67506930-842d-411a-b032-26874042995d\" (UID: \"67506930-842d-411a-b032-26874042995d\") " Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.109327 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67506930-842d-411a-b032-26874042995d-catalog-content\") pod \"67506930-842d-411a-b032-26874042995d\" (UID: \"67506930-842d-411a-b032-26874042995d\") " Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.109358 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/765bb4a4-7c41-414b-a9be-a54be49b76ff-marketplace-operator-metrics\") pod \"765bb4a4-7c41-414b-a9be-a54be49b76ff\" (UID: \"765bb4a4-7c41-414b-a9be-a54be49b76ff\") " Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.109377 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d788v\" (UniqueName: \"kubernetes.io/projected/6c5a30dc-06ca-435f-81b9-576f03f05a19-kube-api-access-d788v\") pod \"6c5a30dc-06ca-435f-81b9-576f03f05a19\" (UID: \"6c5a30dc-06ca-435f-81b9-576f03f05a19\") " Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.109404 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c5a30dc-06ca-435f-81b9-576f03f05a19-catalog-content\") pod \"6c5a30dc-06ca-435f-81b9-576f03f05a19\" (UID: \"6c5a30dc-06ca-435f-81b9-576f03f05a19\") " Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.109429 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/765bb4a4-7c41-414b-a9be-a54be49b76ff-marketplace-trusted-ca\") pod \"765bb4a4-7c41-414b-a9be-a54be49b76ff\" (UID: \"765bb4a4-7c41-414b-a9be-a54be49b76ff\") " Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.109661 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dde2226a-d12b-4c3b-a396-cf72781488ca-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.109675 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zw5mx\" (UniqueName: \"kubernetes.io/projected/dde2226a-d12b-4c3b-a396-cf72781488ca-kube-api-access-zw5mx\") on node \"crc\" DevicePath \"\"" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.109686 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dde2226a-d12b-4c3b-a396-cf72781488ca-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.110397 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c5a30dc-06ca-435f-81b9-576f03f05a19-utilities" (OuterVolumeSpecName: "utilities") pod "6c5a30dc-06ca-435f-81b9-576f03f05a19" (UID: "6c5a30dc-06ca-435f-81b9-576f03f05a19"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.110738 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7370c672-28ad-4228-8285-c113c6675ba8-utilities" (OuterVolumeSpecName: "utilities") pod "7370c672-28ad-4228-8285-c113c6675ba8" (UID: "7370c672-28ad-4228-8285-c113c6675ba8"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.110834 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/765bb4a4-7c41-414b-a9be-a54be49b76ff-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "765bb4a4-7c41-414b-a9be-a54be49b76ff" (UID: "765bb4a4-7c41-414b-a9be-a54be49b76ff"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.112309 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67506930-842d-411a-b032-26874042995d-utilities" (OuterVolumeSpecName: "utilities") pod "67506930-842d-411a-b032-26874042995d" (UID: "67506930-842d-411a-b032-26874042995d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.113273 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/765bb4a4-7c41-414b-a9be-a54be49b76ff-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "765bb4a4-7c41-414b-a9be-a54be49b76ff" (UID: "765bb4a4-7c41-414b-a9be-a54be49b76ff"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.113872 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c5a30dc-06ca-435f-81b9-576f03f05a19-kube-api-access-d788v" (OuterVolumeSpecName: "kube-api-access-d788v") pod "6c5a30dc-06ca-435f-81b9-576f03f05a19" (UID: "6c5a30dc-06ca-435f-81b9-576f03f05a19"). InnerVolumeSpecName "kube-api-access-d788v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.115065 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7370c672-28ad-4228-8285-c113c6675ba8-kube-api-access-qbncs" (OuterVolumeSpecName: "kube-api-access-qbncs") pod "7370c672-28ad-4228-8285-c113c6675ba8" (UID: "7370c672-28ad-4228-8285-c113c6675ba8"). InnerVolumeSpecName "kube-api-access-qbncs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.115678 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/765bb4a4-7c41-414b-a9be-a54be49b76ff-kube-api-access-cbz6m" (OuterVolumeSpecName: "kube-api-access-cbz6m") pod "765bb4a4-7c41-414b-a9be-a54be49b76ff" (UID: "765bb4a4-7c41-414b-a9be-a54be49b76ff"). InnerVolumeSpecName "kube-api-access-cbz6m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.123649 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67506930-842d-411a-b032-26874042995d-kube-api-access-w69jz" (OuterVolumeSpecName: "kube-api-access-w69jz") pod "67506930-842d-411a-b032-26874042995d" (UID: "67506930-842d-411a-b032-26874042995d"). InnerVolumeSpecName "kube-api-access-w69jz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.130605 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67506930-842d-411a-b032-26874042995d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "67506930-842d-411a-b032-26874042995d" (UID: "67506930-842d-411a-b032-26874042995d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.166730 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c5a30dc-06ca-435f-81b9-576f03f05a19-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6c5a30dc-06ca-435f-81b9-576f03f05a19" (UID: "6c5a30dc-06ca-435f-81b9-576f03f05a19"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.196433 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jqxj7"] Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.199568 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-jqxj7"] Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.210578 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67506930-842d-411a-b032-26874042995d-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.210616 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qbncs\" (UniqueName: \"kubernetes.io/projected/7370c672-28ad-4228-8285-c113c6675ba8-kube-api-access-qbncs\") on node \"crc\" DevicePath \"\"" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.210627 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cbz6m\" (UniqueName: \"kubernetes.io/projected/765bb4a4-7c41-414b-a9be-a54be49b76ff-kube-api-access-cbz6m\") on node \"crc\" DevicePath \"\"" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.210635 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7370c672-28ad-4228-8285-c113c6675ba8-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.210643 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c5a30dc-06ca-435f-81b9-576f03f05a19-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.210652 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w69jz\" (UniqueName: \"kubernetes.io/projected/67506930-842d-411a-b032-26874042995d-kube-api-access-w69jz\") on node \"crc\" DevicePath \"\"" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.210660 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67506930-842d-411a-b032-26874042995d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.210668 5003 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/765bb4a4-7c41-414b-a9be-a54be49b76ff-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.210677 5003 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d788v\" (UniqueName: \"kubernetes.io/projected/6c5a30dc-06ca-435f-81b9-576f03f05a19-kube-api-access-d788v\") on node \"crc\" DevicePath \"\"" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.210685 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c5a30dc-06ca-435f-81b9-576f03f05a19-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.210692 5003 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/765bb4a4-7c41-414b-a9be-a54be49b76ff-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.212705 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-5fqrn"] Dec 06 15:38:49 crc kubenswrapper[5003]: W1206 15:38:49.217791 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4ae558d3_8724_4da4_bd37_89893945a2f3.slice/crio-963308e7a737b0703695b986967ab9400946ad2e56fbe0c21ca0a6a650054d0f WatchSource:0}: Error finding container 963308e7a737b0703695b986967ab9400946ad2e56fbe0c21ca0a6a650054d0f: Status 404 returned error can't find the container with id 963308e7a737b0703695b986967ab9400946ad2e56fbe0c21ca0a6a650054d0f Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.230163 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7370c672-28ad-4228-8285-c113c6675ba8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7370c672-28ad-4228-8285-c113c6675ba8" (UID: "7370c672-28ad-4228-8285-c113c6675ba8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.312382 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7370c672-28ad-4228-8285-c113c6675ba8-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.719197 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dde2226a-d12b-4c3b-a396-cf72781488ca" path="/var/lib/kubelet/pods/dde2226a-d12b-4c3b-a396-cf72781488ca/volumes" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.879606 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gtwdh" event={"ID":"6c5a30dc-06ca-435f-81b9-576f03f05a19","Type":"ContainerDied","Data":"255332d1f913a88a8becd98fa9b2ebfdbcea6f545d4f2fc8f48a17439bd39f63"} Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.879718 5003 scope.go:117] "RemoveContainer" containerID="e4b5fb7a09cbb912f18b48dddb3dffa5249d76b83fcfc068a3c747f22b2389a3" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.879999 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gtwdh" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.884808 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-5fqrn" event={"ID":"4ae558d3-8724-4da4-bd37-89893945a2f3","Type":"ContainerStarted","Data":"00f9342dff22db86c7387a45e7e1c49b92c4ee2e156b87171296d140a7e575c1"} Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.884833 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-5fqrn" event={"ID":"4ae558d3-8724-4da4-bd37-89893945a2f3","Type":"ContainerStarted","Data":"963308e7a737b0703695b986967ab9400946ad2e56fbe0c21ca0a6a650054d0f"} Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.885456 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-5fqrn" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.889063 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-5fqrn" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.889716 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mbw9t" event={"ID":"7370c672-28ad-4228-8285-c113c6675ba8","Type":"ContainerDied","Data":"328e2680dcd49b7f80a84e77f0fefb91e9a07a50d843e3172db1ce340c2fcd54"} Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.889832 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mbw9t" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.891526 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-lwc4r" event={"ID":"765bb4a4-7c41-414b-a9be-a54be49b76ff","Type":"ContainerDied","Data":"07bc3a9140f6ab01c341d730484a891bf52fca94a8c3ce59f871d0f460ddfadb"} Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.891929 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-lwc4r" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.894876 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b5rnp" event={"ID":"67506930-842d-411a-b032-26874042995d","Type":"ContainerDied","Data":"06944fdc684288d5d0a24af32d250c71a9d01e10e3cee142eb570d81a158e57f"} Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.894931 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b5rnp" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.895076 5003 scope.go:117] "RemoveContainer" containerID="f905ccfbc05a78b505e1d01b71a3d3802d9fc897e9de695484f99225a1098306" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.911623 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-lwc4r"] Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.917803 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-lwc4r"] Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.926971 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-b5rnp"] Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.928049 5003 scope.go:117] "RemoveContainer" containerID="9216affd75358bb2a6ff2e3bc42ff778f149f28f1117596035ccf06ebe995dbf" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.931723 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-b5rnp"] Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.951424 5003 scope.go:117] "RemoveContainer" containerID="a6e05613c5b9fca2c80befa1b17e35f07d84b3f182346f87fdc15363be8df4f4" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.953728 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-5fqrn" podStartSLOduration=1.953705164 podStartE2EDuration="1.953705164s" podCreationTimestamp="2025-12-06 15:38:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:38:49.941498799 +0000 UTC m=+408.474853180" watchObservedRunningTime="2025-12-06 15:38:49.953705164 +0000 UTC m=+408.487059545" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.970343 5003 scope.go:117] "RemoveContainer" containerID="87f7a0f1c7eacadd76d10b88264818cc1bb8b5a5d5fae61a0843d9bf22575f2f" Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.974330 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mbw9t"] Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.987069 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-mbw9t"] Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.994300 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gtwdh"] Dec 06 15:38:49 crc kubenswrapper[5003]: I1206 15:38:49.995461 5003 scope.go:117] "RemoveContainer" containerID="7e8e69326ef21a317a3c14f1e2f1362c7e0f7e0232f2349288a852a406ca737c" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.002532 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-gtwdh"] Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.010897 5003 scope.go:117] "RemoveContainer" containerID="980c7720cf8d72efde230240c7082200502a541160ff9e312d89bce6e9dae4b0" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.032169 5003 scope.go:117] "RemoveContainer" containerID="4ecc296f8d469b13572f3088613c6f5307d365952b1d0bae6750bc39ee54f433" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.043510 5003 scope.go:117] "RemoveContainer" containerID="dfc4979fa8c3b5bab62b723ac7719bdbbc226ca085488ff2b437bf4809997b3d" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 
15:38:50.057036 5003 scope.go:117] "RemoveContainer" containerID="945cf848ab3571079ee3e03dacb56cb9946d9ab9f2c6e1658097d0818e068eb9" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.565646 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-n6k6n"] Dec 06 15:38:50 crc kubenswrapper[5003]: E1206 15:38:50.565839 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c5a30dc-06ca-435f-81b9-576f03f05a19" containerName="extract-content" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.565850 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c5a30dc-06ca-435f-81b9-576f03f05a19" containerName="extract-content" Dec 06 15:38:50 crc kubenswrapper[5003]: E1206 15:38:50.565858 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dde2226a-d12b-4c3b-a396-cf72781488ca" containerName="extract-content" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.565864 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="dde2226a-d12b-4c3b-a396-cf72781488ca" containerName="extract-content" Dec 06 15:38:50 crc kubenswrapper[5003]: E1206 15:38:50.565870 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c5a30dc-06ca-435f-81b9-576f03f05a19" containerName="registry-server" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.565876 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c5a30dc-06ca-435f-81b9-576f03f05a19" containerName="registry-server" Dec 06 15:38:50 crc kubenswrapper[5003]: E1206 15:38:50.565882 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7370c672-28ad-4228-8285-c113c6675ba8" containerName="extract-utilities" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.565889 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="7370c672-28ad-4228-8285-c113c6675ba8" containerName="extract-utilities" Dec 06 15:38:50 crc kubenswrapper[5003]: E1206 15:38:50.565896 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7370c672-28ad-4228-8285-c113c6675ba8" containerName="extract-content" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.565901 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="7370c672-28ad-4228-8285-c113c6675ba8" containerName="extract-content" Dec 06 15:38:50 crc kubenswrapper[5003]: E1206 15:38:50.565912 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="765bb4a4-7c41-414b-a9be-a54be49b76ff" containerName="marketplace-operator" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.565918 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="765bb4a4-7c41-414b-a9be-a54be49b76ff" containerName="marketplace-operator" Dec 06 15:38:50 crc kubenswrapper[5003]: E1206 15:38:50.565925 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c5a30dc-06ca-435f-81b9-576f03f05a19" containerName="extract-utilities" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.565931 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c5a30dc-06ca-435f-81b9-576f03f05a19" containerName="extract-utilities" Dec 06 15:38:50 crc kubenswrapper[5003]: E1206 15:38:50.565940 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dde2226a-d12b-4c3b-a396-cf72781488ca" containerName="extract-utilities" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.565946 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="dde2226a-d12b-4c3b-a396-cf72781488ca" containerName="extract-utilities" Dec 06 15:38:50 crc kubenswrapper[5003]: E1206 
15:38:50.565956 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67506930-842d-411a-b032-26874042995d" containerName="registry-server" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.565962 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="67506930-842d-411a-b032-26874042995d" containerName="registry-server" Dec 06 15:38:50 crc kubenswrapper[5003]: E1206 15:38:50.565972 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67506930-842d-411a-b032-26874042995d" containerName="extract-content" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.565978 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="67506930-842d-411a-b032-26874042995d" containerName="extract-content" Dec 06 15:38:50 crc kubenswrapper[5003]: E1206 15:38:50.565985 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="765bb4a4-7c41-414b-a9be-a54be49b76ff" containerName="marketplace-operator" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.565990 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="765bb4a4-7c41-414b-a9be-a54be49b76ff" containerName="marketplace-operator" Dec 06 15:38:50 crc kubenswrapper[5003]: E1206 15:38:50.565998 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7370c672-28ad-4228-8285-c113c6675ba8" containerName="registry-server" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.566004 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="7370c672-28ad-4228-8285-c113c6675ba8" containerName="registry-server" Dec 06 15:38:50 crc kubenswrapper[5003]: E1206 15:38:50.566011 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dde2226a-d12b-4c3b-a396-cf72781488ca" containerName="registry-server" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.566016 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="dde2226a-d12b-4c3b-a396-cf72781488ca" containerName="registry-server" Dec 06 15:38:50 crc kubenswrapper[5003]: E1206 15:38:50.566024 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67506930-842d-411a-b032-26874042995d" containerName="extract-utilities" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.566030 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="67506930-842d-411a-b032-26874042995d" containerName="extract-utilities" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.566124 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="67506930-842d-411a-b032-26874042995d" containerName="registry-server" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.566134 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="7370c672-28ad-4228-8285-c113c6675ba8" containerName="registry-server" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.566141 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="dde2226a-d12b-4c3b-a396-cf72781488ca" containerName="registry-server" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.566150 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c5a30dc-06ca-435f-81b9-576f03f05a19" containerName="registry-server" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.566158 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="765bb4a4-7c41-414b-a9be-a54be49b76ff" containerName="marketplace-operator" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.566166 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="765bb4a4-7c41-414b-a9be-a54be49b76ff" 
containerName="marketplace-operator" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.567071 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n6k6n" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.572515 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.573614 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-n6k6n"] Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.735556 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4-catalog-content\") pod \"redhat-marketplace-n6k6n\" (UID: \"c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4\") " pod="openshift-marketplace/redhat-marketplace-n6k6n" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.735614 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4-utilities\") pod \"redhat-marketplace-n6k6n\" (UID: \"c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4\") " pod="openshift-marketplace/redhat-marketplace-n6k6n" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.735656 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mpc2k\" (UniqueName: \"kubernetes.io/projected/c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4-kube-api-access-mpc2k\") pod \"redhat-marketplace-n6k6n\" (UID: \"c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4\") " pod="openshift-marketplace/redhat-marketplace-n6k6n" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.761814 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-cvx9j"] Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.762834 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cvx9j" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.765688 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.771122 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cvx9j"] Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.837308 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4-catalog-content\") pod \"redhat-marketplace-n6k6n\" (UID: \"c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4\") " pod="openshift-marketplace/redhat-marketplace-n6k6n" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.837368 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4-utilities\") pod \"redhat-marketplace-n6k6n\" (UID: \"c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4\") " pod="openshift-marketplace/redhat-marketplace-n6k6n" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.837422 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mpc2k\" (UniqueName: \"kubernetes.io/projected/c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4-kube-api-access-mpc2k\") pod \"redhat-marketplace-n6k6n\" (UID: \"c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4\") " pod="openshift-marketplace/redhat-marketplace-n6k6n" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.838521 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4-utilities\") pod \"redhat-marketplace-n6k6n\" (UID: \"c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4\") " pod="openshift-marketplace/redhat-marketplace-n6k6n" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.838550 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4-catalog-content\") pod \"redhat-marketplace-n6k6n\" (UID: \"c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4\") " pod="openshift-marketplace/redhat-marketplace-n6k6n" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.858682 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mpc2k\" (UniqueName: \"kubernetes.io/projected/c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4-kube-api-access-mpc2k\") pod \"redhat-marketplace-n6k6n\" (UID: \"c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4\") " pod="openshift-marketplace/redhat-marketplace-n6k6n" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.901879 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n6k6n" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.940151 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f96g7\" (UniqueName: \"kubernetes.io/projected/1b508a70-c3a8-4f75-ae70-38613a4011cb-kube-api-access-f96g7\") pod \"certified-operators-cvx9j\" (UID: \"1b508a70-c3a8-4f75-ae70-38613a4011cb\") " pod="openshift-marketplace/certified-operators-cvx9j" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.940303 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b508a70-c3a8-4f75-ae70-38613a4011cb-utilities\") pod \"certified-operators-cvx9j\" (UID: \"1b508a70-c3a8-4f75-ae70-38613a4011cb\") " pod="openshift-marketplace/certified-operators-cvx9j" Dec 06 15:38:50 crc kubenswrapper[5003]: I1206 15:38:50.940401 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b508a70-c3a8-4f75-ae70-38613a4011cb-catalog-content\") pod \"certified-operators-cvx9j\" (UID: \"1b508a70-c3a8-4f75-ae70-38613a4011cb\") " pod="openshift-marketplace/certified-operators-cvx9j" Dec 06 15:38:51 crc kubenswrapper[5003]: I1206 15:38:51.041671 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b508a70-c3a8-4f75-ae70-38613a4011cb-utilities\") pod \"certified-operators-cvx9j\" (UID: \"1b508a70-c3a8-4f75-ae70-38613a4011cb\") " pod="openshift-marketplace/certified-operators-cvx9j" Dec 06 15:38:51 crc kubenswrapper[5003]: I1206 15:38:51.041716 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b508a70-c3a8-4f75-ae70-38613a4011cb-catalog-content\") pod \"certified-operators-cvx9j\" (UID: \"1b508a70-c3a8-4f75-ae70-38613a4011cb\") " pod="openshift-marketplace/certified-operators-cvx9j" Dec 06 15:38:51 crc kubenswrapper[5003]: I1206 15:38:51.041768 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f96g7\" (UniqueName: \"kubernetes.io/projected/1b508a70-c3a8-4f75-ae70-38613a4011cb-kube-api-access-f96g7\") pod \"certified-operators-cvx9j\" (UID: \"1b508a70-c3a8-4f75-ae70-38613a4011cb\") " pod="openshift-marketplace/certified-operators-cvx9j" Dec 06 15:38:51 crc kubenswrapper[5003]: I1206 15:38:51.042109 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b508a70-c3a8-4f75-ae70-38613a4011cb-utilities\") pod \"certified-operators-cvx9j\" (UID: \"1b508a70-c3a8-4f75-ae70-38613a4011cb\") " pod="openshift-marketplace/certified-operators-cvx9j" Dec 06 15:38:51 crc kubenswrapper[5003]: I1206 15:38:51.042239 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b508a70-c3a8-4f75-ae70-38613a4011cb-catalog-content\") pod \"certified-operators-cvx9j\" (UID: \"1b508a70-c3a8-4f75-ae70-38613a4011cb\") " pod="openshift-marketplace/certified-operators-cvx9j" Dec 06 15:38:51 crc kubenswrapper[5003]: I1206 15:38:51.072860 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f96g7\" (UniqueName: \"kubernetes.io/projected/1b508a70-c3a8-4f75-ae70-38613a4011cb-kube-api-access-f96g7\") pod 
\"certified-operators-cvx9j\" (UID: \"1b508a70-c3a8-4f75-ae70-38613a4011cb\") " pod="openshift-marketplace/certified-operators-cvx9j" Dec 06 15:38:51 crc kubenswrapper[5003]: I1206 15:38:51.092223 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cvx9j" Dec 06 15:38:51 crc kubenswrapper[5003]: I1206 15:38:51.284057 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-n6k6n"] Dec 06 15:38:51 crc kubenswrapper[5003]: W1206 15:38:51.289176 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc4c46a65_2b6a_413a_9dd9_5aaa2d2041f4.slice/crio-7b06988108a5ddb4658149b016098286bc93559307b9138670719d55f6daae06 WatchSource:0}: Error finding container 7b06988108a5ddb4658149b016098286bc93559307b9138670719d55f6daae06: Status 404 returned error can't find the container with id 7b06988108a5ddb4658149b016098286bc93559307b9138670719d55f6daae06 Dec 06 15:38:51 crc kubenswrapper[5003]: W1206 15:38:51.518990 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1b508a70_c3a8_4f75_ae70_38613a4011cb.slice/crio-39c259e465e721478dc1426329ef40682ffe0a141159eea2d3097b2c93976e2a WatchSource:0}: Error finding container 39c259e465e721478dc1426329ef40682ffe0a141159eea2d3097b2c93976e2a: Status 404 returned error can't find the container with id 39c259e465e721478dc1426329ef40682ffe0a141159eea2d3097b2c93976e2a Dec 06 15:38:51 crc kubenswrapper[5003]: I1206 15:38:51.519070 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cvx9j"] Dec 06 15:38:51 crc kubenswrapper[5003]: I1206 15:38:51.720118 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67506930-842d-411a-b032-26874042995d" path="/var/lib/kubelet/pods/67506930-842d-411a-b032-26874042995d/volumes" Dec 06 15:38:51 crc kubenswrapper[5003]: I1206 15:38:51.721038 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c5a30dc-06ca-435f-81b9-576f03f05a19" path="/var/lib/kubelet/pods/6c5a30dc-06ca-435f-81b9-576f03f05a19/volumes" Dec 06 15:38:51 crc kubenswrapper[5003]: I1206 15:38:51.721875 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7370c672-28ad-4228-8285-c113c6675ba8" path="/var/lib/kubelet/pods/7370c672-28ad-4228-8285-c113c6675ba8/volumes" Dec 06 15:38:51 crc kubenswrapper[5003]: I1206 15:38:51.723169 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="765bb4a4-7c41-414b-a9be-a54be49b76ff" path="/var/lib/kubelet/pods/765bb4a4-7c41-414b-a9be-a54be49b76ff/volumes" Dec 06 15:38:51 crc kubenswrapper[5003]: I1206 15:38:51.913679 5003 generic.go:334] "Generic (PLEG): container finished" podID="c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4" containerID="ef352f9df5d7253f55f93b036c6216e52484c8d99635f22f0666a7411e9076e4" exitCode=0 Dec 06 15:38:51 crc kubenswrapper[5003]: I1206 15:38:51.913725 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n6k6n" event={"ID":"c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4","Type":"ContainerDied","Data":"ef352f9df5d7253f55f93b036c6216e52484c8d99635f22f0666a7411e9076e4"} Dec 06 15:38:51 crc kubenswrapper[5003]: I1206 15:38:51.913759 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n6k6n" 
event={"ID":"c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4","Type":"ContainerStarted","Data":"7b06988108a5ddb4658149b016098286bc93559307b9138670719d55f6daae06"} Dec 06 15:38:51 crc kubenswrapper[5003]: I1206 15:38:51.918981 5003 generic.go:334] "Generic (PLEG): container finished" podID="1b508a70-c3a8-4f75-ae70-38613a4011cb" containerID="412db6f7c841d04f349b845825764662115e7220958f1de4718f3277dc3252cc" exitCode=0 Dec 06 15:38:51 crc kubenswrapper[5003]: I1206 15:38:51.919054 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cvx9j" event={"ID":"1b508a70-c3a8-4f75-ae70-38613a4011cb","Type":"ContainerDied","Data":"412db6f7c841d04f349b845825764662115e7220958f1de4718f3277dc3252cc"} Dec 06 15:38:51 crc kubenswrapper[5003]: I1206 15:38:51.919097 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cvx9j" event={"ID":"1b508a70-c3a8-4f75-ae70-38613a4011cb","Type":"ContainerStarted","Data":"39c259e465e721478dc1426329ef40682ffe0a141159eea2d3097b2c93976e2a"} Dec 06 15:38:52 crc kubenswrapper[5003]: I1206 15:38:52.962635 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-brzlr"] Dec 06 15:38:52 crc kubenswrapper[5003]: I1206 15:38:52.964656 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-brzlr" Dec 06 15:38:52 crc kubenswrapper[5003]: I1206 15:38:52.967045 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 06 15:38:52 crc kubenswrapper[5003]: I1206 15:38:52.973968 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-brzlr"] Dec 06 15:38:53 crc kubenswrapper[5003]: I1206 15:38:53.066173 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e-catalog-content\") pod \"community-operators-brzlr\" (UID: \"462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e\") " pod="openshift-marketplace/community-operators-brzlr" Dec 06 15:38:53 crc kubenswrapper[5003]: I1206 15:38:53.066482 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-97xxr\" (UniqueName: \"kubernetes.io/projected/462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e-kube-api-access-97xxr\") pod \"community-operators-brzlr\" (UID: \"462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e\") " pod="openshift-marketplace/community-operators-brzlr" Dec 06 15:38:53 crc kubenswrapper[5003]: I1206 15:38:53.066526 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e-utilities\") pod \"community-operators-brzlr\" (UID: \"462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e\") " pod="openshift-marketplace/community-operators-brzlr" Dec 06 15:38:53 crc kubenswrapper[5003]: I1206 15:38:53.167328 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e-utilities\") pod \"community-operators-brzlr\" (UID: \"462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e\") " pod="openshift-marketplace/community-operators-brzlr" Dec 06 15:38:53 crc kubenswrapper[5003]: I1206 15:38:53.167529 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e-catalog-content\") pod \"community-operators-brzlr\" (UID: \"462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e\") " pod="openshift-marketplace/community-operators-brzlr" Dec 06 15:38:53 crc kubenswrapper[5003]: I1206 15:38:53.167580 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97xxr\" (UniqueName: \"kubernetes.io/projected/462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e-kube-api-access-97xxr\") pod \"community-operators-brzlr\" (UID: \"462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e\") " pod="openshift-marketplace/community-operators-brzlr" Dec 06 15:38:53 crc kubenswrapper[5003]: I1206 15:38:53.167999 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e-utilities\") pod \"community-operators-brzlr\" (UID: \"462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e\") " pod="openshift-marketplace/community-operators-brzlr" Dec 06 15:38:53 crc kubenswrapper[5003]: I1206 15:38:53.168225 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e-catalog-content\") pod \"community-operators-brzlr\" (UID: \"462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e\") " pod="openshift-marketplace/community-operators-brzlr" Dec 06 15:38:53 crc kubenswrapper[5003]: I1206 15:38:53.170237 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-4f6q7"] Dec 06 15:38:53 crc kubenswrapper[5003]: I1206 15:38:53.171329 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4f6q7" Dec 06 15:38:53 crc kubenswrapper[5003]: I1206 15:38:53.178864 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 06 15:38:53 crc kubenswrapper[5003]: I1206 15:38:53.179369 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4f6q7"] Dec 06 15:38:53 crc kubenswrapper[5003]: I1206 15:38:53.200674 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-97xxr\" (UniqueName: \"kubernetes.io/projected/462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e-kube-api-access-97xxr\") pod \"community-operators-brzlr\" (UID: \"462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e\") " pod="openshift-marketplace/community-operators-brzlr" Dec 06 15:38:53 crc kubenswrapper[5003]: I1206 15:38:53.268597 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebf945ad-37bf-4837-8fce-af8b8634c82f-utilities\") pod \"redhat-operators-4f6q7\" (UID: \"ebf945ad-37bf-4837-8fce-af8b8634c82f\") " pod="openshift-marketplace/redhat-operators-4f6q7" Dec 06 15:38:53 crc kubenswrapper[5003]: I1206 15:38:53.268748 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8w9n\" (UniqueName: \"kubernetes.io/projected/ebf945ad-37bf-4837-8fce-af8b8634c82f-kube-api-access-t8w9n\") pod \"redhat-operators-4f6q7\" (UID: \"ebf945ad-37bf-4837-8fce-af8b8634c82f\") " pod="openshift-marketplace/redhat-operators-4f6q7" Dec 06 15:38:53 crc kubenswrapper[5003]: I1206 15:38:53.268786 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/ebf945ad-37bf-4837-8fce-af8b8634c82f-catalog-content\") pod \"redhat-operators-4f6q7\" (UID: \"ebf945ad-37bf-4837-8fce-af8b8634c82f\") " pod="openshift-marketplace/redhat-operators-4f6q7" Dec 06 15:38:53 crc kubenswrapper[5003]: I1206 15:38:53.291012 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-brzlr" Dec 06 15:38:53 crc kubenswrapper[5003]: I1206 15:38:53.369467 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8w9n\" (UniqueName: \"kubernetes.io/projected/ebf945ad-37bf-4837-8fce-af8b8634c82f-kube-api-access-t8w9n\") pod \"redhat-operators-4f6q7\" (UID: \"ebf945ad-37bf-4837-8fce-af8b8634c82f\") " pod="openshift-marketplace/redhat-operators-4f6q7" Dec 06 15:38:53 crc kubenswrapper[5003]: I1206 15:38:53.369535 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebf945ad-37bf-4837-8fce-af8b8634c82f-catalog-content\") pod \"redhat-operators-4f6q7\" (UID: \"ebf945ad-37bf-4837-8fce-af8b8634c82f\") " pod="openshift-marketplace/redhat-operators-4f6q7" Dec 06 15:38:53 crc kubenswrapper[5003]: I1206 15:38:53.369558 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebf945ad-37bf-4837-8fce-af8b8634c82f-utilities\") pod \"redhat-operators-4f6q7\" (UID: \"ebf945ad-37bf-4837-8fce-af8b8634c82f\") " pod="openshift-marketplace/redhat-operators-4f6q7" Dec 06 15:38:53 crc kubenswrapper[5003]: I1206 15:38:53.369995 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebf945ad-37bf-4837-8fce-af8b8634c82f-utilities\") pod \"redhat-operators-4f6q7\" (UID: \"ebf945ad-37bf-4837-8fce-af8b8634c82f\") " pod="openshift-marketplace/redhat-operators-4f6q7" Dec 06 15:38:53 crc kubenswrapper[5003]: I1206 15:38:53.370721 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebf945ad-37bf-4837-8fce-af8b8634c82f-catalog-content\") pod \"redhat-operators-4f6q7\" (UID: \"ebf945ad-37bf-4837-8fce-af8b8634c82f\") " pod="openshift-marketplace/redhat-operators-4f6q7" Dec 06 15:38:53 crc kubenswrapper[5003]: I1206 15:38:53.393204 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8w9n\" (UniqueName: \"kubernetes.io/projected/ebf945ad-37bf-4837-8fce-af8b8634c82f-kube-api-access-t8w9n\") pod \"redhat-operators-4f6q7\" (UID: \"ebf945ad-37bf-4837-8fce-af8b8634c82f\") " pod="openshift-marketplace/redhat-operators-4f6q7" Dec 06 15:38:53 crc kubenswrapper[5003]: I1206 15:38:53.536204 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4f6q7" Dec 06 15:38:53 crc kubenswrapper[5003]: I1206 15:38:53.693035 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-brzlr"] Dec 06 15:38:53 crc kubenswrapper[5003]: I1206 15:38:53.936068 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4f6q7"] Dec 06 15:38:53 crc kubenswrapper[5003]: I1206 15:38:53.937298 5003 generic.go:334] "Generic (PLEG): container finished" podID="462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e" containerID="f20746169c4e96ac13e48990ed8f1fddb719d7ee33edd8d7cd5a882c263dcd5c" exitCode=0 Dec 06 15:38:53 crc kubenswrapper[5003]: I1206 15:38:53.937364 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-brzlr" event={"ID":"462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e","Type":"ContainerDied","Data":"f20746169c4e96ac13e48990ed8f1fddb719d7ee33edd8d7cd5a882c263dcd5c"} Dec 06 15:38:53 crc kubenswrapper[5003]: I1206 15:38:53.937390 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-brzlr" event={"ID":"462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e","Type":"ContainerStarted","Data":"fb6e40f2ac7a97281bb3bfb2029a745f79d54b22ad4b611dc26e5504e7935a99"} Dec 06 15:38:53 crc kubenswrapper[5003]: I1206 15:38:53.941025 5003 generic.go:334] "Generic (PLEG): container finished" podID="1b508a70-c3a8-4f75-ae70-38613a4011cb" containerID="967a4666752a0921d90e36a67c4f50451c5cbdb33302596e3de9f0052adadacb" exitCode=0 Dec 06 15:38:53 crc kubenswrapper[5003]: I1206 15:38:53.941200 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cvx9j" event={"ID":"1b508a70-c3a8-4f75-ae70-38613a4011cb","Type":"ContainerDied","Data":"967a4666752a0921d90e36a67c4f50451c5cbdb33302596e3de9f0052adadacb"} Dec 06 15:38:53 crc kubenswrapper[5003]: I1206 15:38:53.947203 5003 generic.go:334] "Generic (PLEG): container finished" podID="c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4" containerID="a2856612af8cbf6c20ef382497638ae12759d407fde6cfc7737ca1971e2d304c" exitCode=0 Dec 06 15:38:53 crc kubenswrapper[5003]: I1206 15:38:53.947250 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n6k6n" event={"ID":"c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4","Type":"ContainerDied","Data":"a2856612af8cbf6c20ef382497638ae12759d407fde6cfc7737ca1971e2d304c"} Dec 06 15:38:53 crc kubenswrapper[5003]: W1206 15:38:53.965221 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podebf945ad_37bf_4837_8fce_af8b8634c82f.slice/crio-a81031f3c60b73fd66f73a6669ddd1e2aca08b9be54a508d99f7790c3c774d7a WatchSource:0}: Error finding container a81031f3c60b73fd66f73a6669ddd1e2aca08b9be54a508d99f7790c3c774d7a: Status 404 returned error can't find the container with id a81031f3c60b73fd66f73a6669ddd1e2aca08b9be54a508d99f7790c3c774d7a Dec 06 15:38:54 crc kubenswrapper[5003]: I1206 15:38:54.954264 5003 generic.go:334] "Generic (PLEG): container finished" podID="ebf945ad-37bf-4837-8fce-af8b8634c82f" containerID="de91c4e88be7a2cb7b25fbf4fceb85ba386f696ad8b410bc4a330c0ad29f759c" exitCode=0 Dec 06 15:38:54 crc kubenswrapper[5003]: I1206 15:38:54.954380 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4f6q7" 
event={"ID":"ebf945ad-37bf-4837-8fce-af8b8634c82f","Type":"ContainerDied","Data":"de91c4e88be7a2cb7b25fbf4fceb85ba386f696ad8b410bc4a330c0ad29f759c"} Dec 06 15:38:54 crc kubenswrapper[5003]: I1206 15:38:54.954824 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4f6q7" event={"ID":"ebf945ad-37bf-4837-8fce-af8b8634c82f","Type":"ContainerStarted","Data":"a81031f3c60b73fd66f73a6669ddd1e2aca08b9be54a508d99f7790c3c774d7a"} Dec 06 15:38:56 crc kubenswrapper[5003]: I1206 15:38:56.011958 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cvx9j" event={"ID":"1b508a70-c3a8-4f75-ae70-38613a4011cb","Type":"ContainerStarted","Data":"7e6c797c5cff762968f6739e913ac782570d800872a11bf12f0cadc95b847b37"} Dec 06 15:38:56 crc kubenswrapper[5003]: I1206 15:38:56.014537 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n6k6n" event={"ID":"c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4","Type":"ContainerStarted","Data":"a22ff5df8146c28d018d85fa986b610ddd2873eeaf7b330a3b05e5992b047687"} Dec 06 15:38:56 crc kubenswrapper[5003]: I1206 15:38:56.017377 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-brzlr" event={"ID":"462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e","Type":"ContainerStarted","Data":"93e9ddec53ce92eb8966007a574904006c988128cee80c7744473277ca1592c7"} Dec 06 15:38:56 crc kubenswrapper[5003]: I1206 15:38:56.044735 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-cvx9j" podStartSLOduration=2.483734224 podStartE2EDuration="6.044713012s" podCreationTimestamp="2025-12-06 15:38:50 +0000 UTC" firstStartedPulling="2025-12-06 15:38:51.920475948 +0000 UTC m=+410.453830329" lastFinishedPulling="2025-12-06 15:38:55.481454736 +0000 UTC m=+414.014809117" observedRunningTime="2025-12-06 15:38:56.041300732 +0000 UTC m=+414.574655113" watchObservedRunningTime="2025-12-06 15:38:56.044713012 +0000 UTC m=+414.578067413" Dec 06 15:38:56 crc kubenswrapper[5003]: E1206 15:38:56.118814 5003 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod462e91e7_2b3b_4fd2_bbb1_94b4a727fe1e.slice/crio-conmon-93e9ddec53ce92eb8966007a574904006c988128cee80c7744473277ca1592c7.scope\": RecentStats: unable to find data in memory cache]" Dec 06 15:38:57 crc kubenswrapper[5003]: I1206 15:38:57.028004 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4f6q7" event={"ID":"ebf945ad-37bf-4837-8fce-af8b8634c82f","Type":"ContainerStarted","Data":"871c78cb3e06839137324be3637771dcdd1ac3310b269a1a917a3f2d39c3b14d"} Dec 06 15:38:57 crc kubenswrapper[5003]: I1206 15:38:57.030818 5003 generic.go:334] "Generic (PLEG): container finished" podID="462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e" containerID="93e9ddec53ce92eb8966007a574904006c988128cee80c7744473277ca1592c7" exitCode=0 Dec 06 15:38:57 crc kubenswrapper[5003]: I1206 15:38:57.032037 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-brzlr" event={"ID":"462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e","Type":"ContainerDied","Data":"93e9ddec53ce92eb8966007a574904006c988128cee80c7744473277ca1592c7"} Dec 06 15:38:57 crc kubenswrapper[5003]: I1206 15:38:57.057921 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-marketplace-n6k6n" podStartSLOduration=4.224082554 podStartE2EDuration="7.057899641s" podCreationTimestamp="2025-12-06 15:38:50 +0000 UTC" firstStartedPulling="2025-12-06 15:38:51.915389451 +0000 UTC m=+410.448743822" lastFinishedPulling="2025-12-06 15:38:54.749206528 +0000 UTC m=+413.282560909" observedRunningTime="2025-12-06 15:38:56.095582713 +0000 UTC m=+414.628937114" watchObservedRunningTime="2025-12-06 15:38:57.057899641 +0000 UTC m=+415.591254032" Dec 06 15:38:58 crc kubenswrapper[5003]: I1206 15:38:58.038541 5003 generic.go:334] "Generic (PLEG): container finished" podID="ebf945ad-37bf-4837-8fce-af8b8634c82f" containerID="871c78cb3e06839137324be3637771dcdd1ac3310b269a1a917a3f2d39c3b14d" exitCode=0 Dec 06 15:38:58 crc kubenswrapper[5003]: I1206 15:38:58.038923 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4f6q7" event={"ID":"ebf945ad-37bf-4837-8fce-af8b8634c82f","Type":"ContainerDied","Data":"871c78cb3e06839137324be3637771dcdd1ac3310b269a1a917a3f2d39c3b14d"} Dec 06 15:39:00 crc kubenswrapper[5003]: I1206 15:39:00.052391 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-brzlr" event={"ID":"462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e","Type":"ContainerStarted","Data":"15e6bc3311e949d260afeae4aa6c093e07a30f96e65386c6582c9660b01381f6"} Dec 06 15:39:00 crc kubenswrapper[5003]: I1206 15:39:00.054414 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4f6q7" event={"ID":"ebf945ad-37bf-4837-8fce-af8b8634c82f","Type":"ContainerStarted","Data":"5e09bdf7858180c5571b801a380c219e2321181d1193b8a9982dc3b2c4ec8d21"} Dec 06 15:39:00 crc kubenswrapper[5003]: I1206 15:39:00.093477 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-4f6q7" podStartSLOduration=3.064349694 podStartE2EDuration="7.093443205s" podCreationTimestamp="2025-12-06 15:38:53 +0000 UTC" firstStartedPulling="2025-12-06 15:38:55.182217821 +0000 UTC m=+413.715572202" lastFinishedPulling="2025-12-06 15:38:59.211311332 +0000 UTC m=+417.744665713" observedRunningTime="2025-12-06 15:39:00.090914061 +0000 UTC m=+418.624268472" watchObservedRunningTime="2025-12-06 15:39:00.093443205 +0000 UTC m=+418.626797586" Dec 06 15:39:00 crc kubenswrapper[5003]: I1206 15:39:00.094967 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-brzlr" podStartSLOduration=4.148158454 podStartE2EDuration="8.094957989s" podCreationTimestamp="2025-12-06 15:38:52 +0000 UTC" firstStartedPulling="2025-12-06 15:38:53.939935827 +0000 UTC m=+412.473290208" lastFinishedPulling="2025-12-06 15:38:57.886735362 +0000 UTC m=+416.420089743" observedRunningTime="2025-12-06 15:39:00.071824565 +0000 UTC m=+418.605178946" watchObservedRunningTime="2025-12-06 15:39:00.094957989 +0000 UTC m=+418.628312360" Dec 06 15:39:00 crc kubenswrapper[5003]: I1206 15:39:00.902919 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-n6k6n" Dec 06 15:39:00 crc kubenswrapper[5003]: I1206 15:39:00.902974 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-n6k6n" Dec 06 15:39:00 crc kubenswrapper[5003]: I1206 15:39:00.961849 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-n6k6n" Dec 06 15:39:01 
crc kubenswrapper[5003]: I1206 15:39:01.092926 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-cvx9j" Dec 06 15:39:01 crc kubenswrapper[5003]: I1206 15:39:01.093725 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-cvx9j" Dec 06 15:39:01 crc kubenswrapper[5003]: I1206 15:39:01.106094 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-n6k6n" Dec 06 15:39:01 crc kubenswrapper[5003]: I1206 15:39:01.151421 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-cvx9j" Dec 06 15:39:02 crc kubenswrapper[5003]: I1206 15:39:02.114176 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-cvx9j" Dec 06 15:39:03 crc kubenswrapper[5003]: I1206 15:39:03.291550 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-brzlr" Dec 06 15:39:03 crc kubenswrapper[5003]: I1206 15:39:03.291861 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-brzlr" Dec 06 15:39:03 crc kubenswrapper[5003]: I1206 15:39:03.332282 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-brzlr" Dec 06 15:39:03 crc kubenswrapper[5003]: I1206 15:39:03.536418 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-4f6q7" Dec 06 15:39:03 crc kubenswrapper[5003]: I1206 15:39:03.536536 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-4f6q7" Dec 06 15:39:04 crc kubenswrapper[5003]: I1206 15:39:04.112818 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-brzlr" Dec 06 15:39:04 crc kubenswrapper[5003]: I1206 15:39:04.583716 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-4f6q7" podUID="ebf945ad-37bf-4837-8fce-af8b8634c82f" containerName="registry-server" probeResult="failure" output=< Dec 06 15:39:04 crc kubenswrapper[5003]: timeout: failed to connect service ":50051" within 1s Dec 06 15:39:04 crc kubenswrapper[5003]: > Dec 06 15:39:07 crc kubenswrapper[5003]: I1206 15:39:07.858850 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" podUID="5cb1719e-962f-436c-bbc0-cd048de8dd14" containerName="registry" containerID="cri-o://1c712938771ac6a053daa14ba2940540518c7ddb9054b917da1d98c13fed6044" gracePeriod=30 Dec 06 15:39:08 crc kubenswrapper[5003]: I1206 15:39:08.391316 5003 patch_prober.go:28] interesting pod/image-registry-697d97f7c8-tvwvh container/registry namespace/openshift-image-registry: Readiness probe status=failure output="Get \"https://10.217.0.20:5000/healthz\": dial tcp 10.217.0.20:5000: connect: connection refused" start-of-body= Dec 06 15:39:08 crc kubenswrapper[5003]: I1206 15:39:08.391610 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" podUID="5cb1719e-962f-436c-bbc0-cd048de8dd14" containerName="registry" probeResult="failure" output="Get \"https://10.217.0.20:5000/healthz\": dial tcp 10.217.0.20:5000: 
connect: connection refused" Dec 06 15:39:12 crc kubenswrapper[5003]: I1206 15:39:12.508561 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:39:12 crc kubenswrapper[5003]: I1206 15:39:12.635247 5003 generic.go:334] "Generic (PLEG): container finished" podID="5cb1719e-962f-436c-bbc0-cd048de8dd14" containerID="1c712938771ac6a053daa14ba2940540518c7ddb9054b917da1d98c13fed6044" exitCode=0 Dec 06 15:39:12 crc kubenswrapper[5003]: I1206 15:39:12.635305 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" event={"ID":"5cb1719e-962f-436c-bbc0-cd048de8dd14","Type":"ContainerDied","Data":"1c712938771ac6a053daa14ba2940540518c7ddb9054b917da1d98c13fed6044"} Dec 06 15:39:12 crc kubenswrapper[5003]: I1206 15:39:12.635340 5003 scope.go:117] "RemoveContainer" containerID="1c712938771ac6a053daa14ba2940540518c7ddb9054b917da1d98c13fed6044" Dec 06 15:39:12 crc kubenswrapper[5003]: I1206 15:39:12.683444 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"5cb1719e-962f-436c-bbc0-cd048de8dd14\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " Dec 06 15:39:12 crc kubenswrapper[5003]: I1206 15:39:12.683536 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5cb1719e-962f-436c-bbc0-cd048de8dd14-trusted-ca\") pod \"5cb1719e-962f-436c-bbc0-cd048de8dd14\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " Dec 06 15:39:12 crc kubenswrapper[5003]: I1206 15:39:12.683585 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5cb1719e-962f-436c-bbc0-cd048de8dd14-installation-pull-secrets\") pod \"5cb1719e-962f-436c-bbc0-cd048de8dd14\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " Dec 06 15:39:12 crc kubenswrapper[5003]: I1206 15:39:12.683623 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5cb1719e-962f-436c-bbc0-cd048de8dd14-registry-tls\") pod \"5cb1719e-962f-436c-bbc0-cd048de8dd14\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " Dec 06 15:39:12 crc kubenswrapper[5003]: I1206 15:39:12.683648 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5cb1719e-962f-436c-bbc0-cd048de8dd14-registry-certificates\") pod \"5cb1719e-962f-436c-bbc0-cd048de8dd14\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " Dec 06 15:39:12 crc kubenswrapper[5003]: I1206 15:39:12.683672 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5cb1719e-962f-436c-bbc0-cd048de8dd14-ca-trust-extracted\") pod \"5cb1719e-962f-436c-bbc0-cd048de8dd14\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " Dec 06 15:39:12 crc kubenswrapper[5003]: I1206 15:39:12.683714 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5cb1719e-962f-436c-bbc0-cd048de8dd14-bound-sa-token\") pod \"5cb1719e-962f-436c-bbc0-cd048de8dd14\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " Dec 06 15:39:12 crc 
kubenswrapper[5003]: I1206 15:39:12.683738 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htvtr\" (UniqueName: \"kubernetes.io/projected/5cb1719e-962f-436c-bbc0-cd048de8dd14-kube-api-access-htvtr\") pod \"5cb1719e-962f-436c-bbc0-cd048de8dd14\" (UID: \"5cb1719e-962f-436c-bbc0-cd048de8dd14\") " Dec 06 15:39:12 crc kubenswrapper[5003]: I1206 15:39:12.684670 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5cb1719e-962f-436c-bbc0-cd048de8dd14-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "5cb1719e-962f-436c-bbc0-cd048de8dd14" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:39:12 crc kubenswrapper[5003]: I1206 15:39:12.684962 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5cb1719e-962f-436c-bbc0-cd048de8dd14-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "5cb1719e-962f-436c-bbc0-cd048de8dd14" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:39:12 crc kubenswrapper[5003]: I1206 15:39:12.684991 5003 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5cb1719e-962f-436c-bbc0-cd048de8dd14-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 06 15:39:12 crc kubenswrapper[5003]: I1206 15:39:12.689871 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5cb1719e-962f-436c-bbc0-cd048de8dd14-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "5cb1719e-962f-436c-bbc0-cd048de8dd14" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:39:12 crc kubenswrapper[5003]: I1206 15:39:12.689902 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5cb1719e-962f-436c-bbc0-cd048de8dd14-kube-api-access-htvtr" (OuterVolumeSpecName: "kube-api-access-htvtr") pod "5cb1719e-962f-436c-bbc0-cd048de8dd14" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14"). InnerVolumeSpecName "kube-api-access-htvtr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:39:12 crc kubenswrapper[5003]: I1206 15:39:12.693406 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5cb1719e-962f-436c-bbc0-cd048de8dd14-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "5cb1719e-962f-436c-bbc0-cd048de8dd14" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:39:12 crc kubenswrapper[5003]: I1206 15:39:12.697663 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cb1719e-962f-436c-bbc0-cd048de8dd14-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "5cb1719e-962f-436c-bbc0-cd048de8dd14" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14"). InnerVolumeSpecName "installation-pull-secrets". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:39:12 crc kubenswrapper[5003]: I1206 15:39:12.698278 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "5cb1719e-962f-436c-bbc0-cd048de8dd14" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 06 15:39:12 crc kubenswrapper[5003]: I1206 15:39:12.709642 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5cb1719e-962f-436c-bbc0-cd048de8dd14-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "5cb1719e-962f-436c-bbc0-cd048de8dd14" (UID: "5cb1719e-962f-436c-bbc0-cd048de8dd14"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:39:12 crc kubenswrapper[5003]: I1206 15:39:12.786059 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htvtr\" (UniqueName: \"kubernetes.io/projected/5cb1719e-962f-436c-bbc0-cd048de8dd14-kube-api-access-htvtr\") on node \"crc\" DevicePath \"\"" Dec 06 15:39:12 crc kubenswrapper[5003]: I1206 15:39:12.786162 5003 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5cb1719e-962f-436c-bbc0-cd048de8dd14-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 06 15:39:12 crc kubenswrapper[5003]: I1206 15:39:12.786188 5003 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5cb1719e-962f-436c-bbc0-cd048de8dd14-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 06 15:39:12 crc kubenswrapper[5003]: I1206 15:39:12.786209 5003 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5cb1719e-962f-436c-bbc0-cd048de8dd14-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 06 15:39:12 crc kubenswrapper[5003]: I1206 15:39:12.786227 5003 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5cb1719e-962f-436c-bbc0-cd048de8dd14-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 06 15:39:12 crc kubenswrapper[5003]: I1206 15:39:12.786245 5003 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5cb1719e-962f-436c-bbc0-cd048de8dd14-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 06 15:39:13 crc kubenswrapper[5003]: I1206 15:39:13.587640 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-4f6q7" Dec 06 15:39:13 crc kubenswrapper[5003]: I1206 15:39:13.625040 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-4f6q7" Dec 06 15:39:13 crc kubenswrapper[5003]: I1206 15:39:13.642480 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" Dec 06 15:39:13 crc kubenswrapper[5003]: I1206 15:39:13.642671 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-tvwvh" event={"ID":"5cb1719e-962f-436c-bbc0-cd048de8dd14","Type":"ContainerDied","Data":"90494900ff5d14fe879dcc8bcbf8607bb3ec4c745033e0b46a897ac5286fe76c"} Dec 06 15:39:13 crc kubenswrapper[5003]: I1206 15:39:13.678076 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-tvwvh"] Dec 06 15:39:13 crc kubenswrapper[5003]: I1206 15:39:13.685594 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-tvwvh"] Dec 06 15:39:13 crc kubenswrapper[5003]: I1206 15:39:13.719581 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5cb1719e-962f-436c-bbc0-cd048de8dd14" path="/var/lib/kubelet/pods/5cb1719e-962f-436c-bbc0-cd048de8dd14/volumes" Dec 06 15:39:18 crc kubenswrapper[5003]: I1206 15:39:18.572681 5003 patch_prober.go:28] interesting pod/machine-config-daemon-w25db container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 15:39:18 crc kubenswrapper[5003]: I1206 15:39:18.573171 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 15:39:18 crc kubenswrapper[5003]: I1206 15:39:18.573261 5003 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w25db" Dec 06 15:39:18 crc kubenswrapper[5003]: I1206 15:39:18.574371 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1af03e52d38341c59b3fe6c255d6e745c45e38f27243b62c0f773f1214c39c22"} pod="openshift-machine-config-operator/machine-config-daemon-w25db" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 06 15:39:18 crc kubenswrapper[5003]: I1206 15:39:18.574538 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" containerName="machine-config-daemon" containerID="cri-o://1af03e52d38341c59b3fe6c255d6e745c45e38f27243b62c0f773f1214c39c22" gracePeriod=600 Dec 06 15:39:19 crc kubenswrapper[5003]: I1206 15:39:19.672805 5003 generic.go:334] "Generic (PLEG): container finished" podID="1a047c4d-003e-4668-9b96-945eab34ab68" containerID="1af03e52d38341c59b3fe6c255d6e745c45e38f27243b62c0f773f1214c39c22" exitCode=0 Dec 06 15:39:19 crc kubenswrapper[5003]: I1206 15:39:19.673311 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" event={"ID":"1a047c4d-003e-4668-9b96-945eab34ab68","Type":"ContainerDied","Data":"1af03e52d38341c59b3fe6c255d6e745c45e38f27243b62c0f773f1214c39c22"} Dec 06 15:39:19 crc kubenswrapper[5003]: I1206 15:39:19.673340 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-w25db" event={"ID":"1a047c4d-003e-4668-9b96-945eab34ab68","Type":"ContainerStarted","Data":"2884742c76d7cbd408daae95be703af27258f3a02a099bf1533eb0a640ac725d"} Dec 06 15:39:19 crc kubenswrapper[5003]: I1206 15:39:19.673356 5003 scope.go:117] "RemoveContainer" containerID="b457ba1d45f5e1524b8539c4c7dc77b24f6d5aa8172b46962e31e4fb6723b7ba" Dec 06 15:41:18 crc kubenswrapper[5003]: I1206 15:41:18.573329 5003 patch_prober.go:28] interesting pod/machine-config-daemon-w25db container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 15:41:18 crc kubenswrapper[5003]: I1206 15:41:18.573909 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 15:41:48 crc kubenswrapper[5003]: I1206 15:41:48.573305 5003 patch_prober.go:28] interesting pod/machine-config-daemon-w25db container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 15:41:48 crc kubenswrapper[5003]: I1206 15:41:48.575241 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 15:42:18 crc kubenswrapper[5003]: I1206 15:42:18.573296 5003 patch_prober.go:28] interesting pod/machine-config-daemon-w25db container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 15:42:18 crc kubenswrapper[5003]: I1206 15:42:18.573739 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 15:42:18 crc kubenswrapper[5003]: I1206 15:42:18.573813 5003 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w25db" Dec 06 15:42:18 crc kubenswrapper[5003]: I1206 15:42:18.574244 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2884742c76d7cbd408daae95be703af27258f3a02a099bf1533eb0a640ac725d"} pod="openshift-machine-config-operator/machine-config-daemon-w25db" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 06 15:42:18 crc kubenswrapper[5003]: I1206 15:42:18.574298 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" 
containerName="machine-config-daemon" containerID="cri-o://2884742c76d7cbd408daae95be703af27258f3a02a099bf1533eb0a640ac725d" gracePeriod=600 Dec 06 15:42:18 crc kubenswrapper[5003]: I1206 15:42:18.745615 5003 generic.go:334] "Generic (PLEG): container finished" podID="1a047c4d-003e-4668-9b96-945eab34ab68" containerID="2884742c76d7cbd408daae95be703af27258f3a02a099bf1533eb0a640ac725d" exitCode=0 Dec 06 15:42:18 crc kubenswrapper[5003]: I1206 15:42:18.745717 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" event={"ID":"1a047c4d-003e-4668-9b96-945eab34ab68","Type":"ContainerDied","Data":"2884742c76d7cbd408daae95be703af27258f3a02a099bf1533eb0a640ac725d"} Dec 06 15:42:18 crc kubenswrapper[5003]: I1206 15:42:18.745863 5003 scope.go:117] "RemoveContainer" containerID="1af03e52d38341c59b3fe6c255d6e745c45e38f27243b62c0f773f1214c39c22" Dec 06 15:42:19 crc kubenswrapper[5003]: I1206 15:42:19.754408 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" event={"ID":"1a047c4d-003e-4668-9b96-945eab34ab68","Type":"ContainerStarted","Data":"be0c79caa5d9dd2c20871e4f314e65be4eca0cd4c85743186922c15f8ac77ad4"} Dec 06 15:44:18 crc kubenswrapper[5003]: I1206 15:44:18.573000 5003 patch_prober.go:28] interesting pod/machine-config-daemon-w25db container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 15:44:18 crc kubenswrapper[5003]: I1206 15:44:18.573695 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 15:44:45 crc kubenswrapper[5003]: I1206 15:44:45.887876 5003 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 06 15:44:48 crc kubenswrapper[5003]: I1206 15:44:48.573112 5003 patch_prober.go:28] interesting pod/machine-config-daemon-w25db container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 15:44:48 crc kubenswrapper[5003]: I1206 15:44:48.573169 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 15:45:00 crc kubenswrapper[5003]: I1206 15:45:00.176637 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29417265-xmlz5"] Dec 06 15:45:00 crc kubenswrapper[5003]: E1206 15:45:00.177378 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cb1719e-962f-436c-bbc0-cd048de8dd14" containerName="registry" Dec 06 15:45:00 crc kubenswrapper[5003]: I1206 15:45:00.177393 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cb1719e-962f-436c-bbc0-cd048de8dd14" containerName="registry" Dec 06 15:45:00 crc kubenswrapper[5003]: 
I1206 15:45:00.177508 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="5cb1719e-962f-436c-bbc0-cd048de8dd14" containerName="registry" Dec 06 15:45:00 crc kubenswrapper[5003]: I1206 15:45:00.177904 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29417265-xmlz5" Dec 06 15:45:00 crc kubenswrapper[5003]: I1206 15:45:00.179821 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 06 15:45:00 crc kubenswrapper[5003]: I1206 15:45:00.179853 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 06 15:45:00 crc kubenswrapper[5003]: I1206 15:45:00.186272 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29417265-xmlz5"] Dec 06 15:45:00 crc kubenswrapper[5003]: I1206 15:45:00.307537 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/368df4f0-f839-4fe1-882f-53504dbf4804-config-volume\") pod \"collect-profiles-29417265-xmlz5\" (UID: \"368df4f0-f839-4fe1-882f-53504dbf4804\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29417265-xmlz5" Dec 06 15:45:00 crc kubenswrapper[5003]: I1206 15:45:00.307700 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/368df4f0-f839-4fe1-882f-53504dbf4804-secret-volume\") pod \"collect-profiles-29417265-xmlz5\" (UID: \"368df4f0-f839-4fe1-882f-53504dbf4804\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29417265-xmlz5" Dec 06 15:45:00 crc kubenswrapper[5003]: I1206 15:45:00.307745 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25fd6\" (UniqueName: \"kubernetes.io/projected/368df4f0-f839-4fe1-882f-53504dbf4804-kube-api-access-25fd6\") pod \"collect-profiles-29417265-xmlz5\" (UID: \"368df4f0-f839-4fe1-882f-53504dbf4804\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29417265-xmlz5" Dec 06 15:45:00 crc kubenswrapper[5003]: I1206 15:45:00.408523 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25fd6\" (UniqueName: \"kubernetes.io/projected/368df4f0-f839-4fe1-882f-53504dbf4804-kube-api-access-25fd6\") pod \"collect-profiles-29417265-xmlz5\" (UID: \"368df4f0-f839-4fe1-882f-53504dbf4804\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29417265-xmlz5" Dec 06 15:45:00 crc kubenswrapper[5003]: I1206 15:45:00.408815 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/368df4f0-f839-4fe1-882f-53504dbf4804-config-volume\") pod \"collect-profiles-29417265-xmlz5\" (UID: \"368df4f0-f839-4fe1-882f-53504dbf4804\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29417265-xmlz5" Dec 06 15:45:00 crc kubenswrapper[5003]: I1206 15:45:00.408957 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/368df4f0-f839-4fe1-882f-53504dbf4804-secret-volume\") pod \"collect-profiles-29417265-xmlz5\" (UID: \"368df4f0-f839-4fe1-882f-53504dbf4804\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29417265-xmlz5" Dec 06 15:45:00 crc kubenswrapper[5003]: I1206 15:45:00.410877 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/368df4f0-f839-4fe1-882f-53504dbf4804-config-volume\") pod \"collect-profiles-29417265-xmlz5\" (UID: \"368df4f0-f839-4fe1-882f-53504dbf4804\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29417265-xmlz5" Dec 06 15:45:00 crc kubenswrapper[5003]: I1206 15:45:00.422276 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/368df4f0-f839-4fe1-882f-53504dbf4804-secret-volume\") pod \"collect-profiles-29417265-xmlz5\" (UID: \"368df4f0-f839-4fe1-882f-53504dbf4804\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29417265-xmlz5" Dec 06 15:45:00 crc kubenswrapper[5003]: I1206 15:45:00.424846 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25fd6\" (UniqueName: \"kubernetes.io/projected/368df4f0-f839-4fe1-882f-53504dbf4804-kube-api-access-25fd6\") pod \"collect-profiles-29417265-xmlz5\" (UID: \"368df4f0-f839-4fe1-882f-53504dbf4804\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29417265-xmlz5" Dec 06 15:45:00 crc kubenswrapper[5003]: I1206 15:45:00.494032 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29417265-xmlz5" Dec 06 15:45:00 crc kubenswrapper[5003]: I1206 15:45:00.719304 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29417265-xmlz5"] Dec 06 15:45:01 crc kubenswrapper[5003]: I1206 15:45:01.731246 5003 generic.go:334] "Generic (PLEG): container finished" podID="368df4f0-f839-4fe1-882f-53504dbf4804" containerID="d13203d0b55dd0a0fcef16e27c153359f71f51b5c16c97d88fd5e49289eaef47" exitCode=0 Dec 06 15:45:01 crc kubenswrapper[5003]: I1206 15:45:01.731403 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29417265-xmlz5" event={"ID":"368df4f0-f839-4fe1-882f-53504dbf4804","Type":"ContainerDied","Data":"d13203d0b55dd0a0fcef16e27c153359f71f51b5c16c97d88fd5e49289eaef47"} Dec 06 15:45:01 crc kubenswrapper[5003]: I1206 15:45:01.731562 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29417265-xmlz5" event={"ID":"368df4f0-f839-4fe1-882f-53504dbf4804","Type":"ContainerStarted","Data":"fe8557adf09e8f72d8bc7a6ae6c690d96fc794ba97edfa2a28cb9bd3b1d52fff"} Dec 06 15:45:02 crc kubenswrapper[5003]: I1206 15:45:02.950976 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29417265-xmlz5" Dec 06 15:45:03 crc kubenswrapper[5003]: I1206 15:45:03.140033 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/368df4f0-f839-4fe1-882f-53504dbf4804-secret-volume\") pod \"368df4f0-f839-4fe1-882f-53504dbf4804\" (UID: \"368df4f0-f839-4fe1-882f-53504dbf4804\") " Dec 06 15:45:03 crc kubenswrapper[5003]: I1206 15:45:03.140207 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/368df4f0-f839-4fe1-882f-53504dbf4804-config-volume\") pod \"368df4f0-f839-4fe1-882f-53504dbf4804\" (UID: \"368df4f0-f839-4fe1-882f-53504dbf4804\") " Dec 06 15:45:03 crc kubenswrapper[5003]: I1206 15:45:03.140266 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25fd6\" (UniqueName: \"kubernetes.io/projected/368df4f0-f839-4fe1-882f-53504dbf4804-kube-api-access-25fd6\") pod \"368df4f0-f839-4fe1-882f-53504dbf4804\" (UID: \"368df4f0-f839-4fe1-882f-53504dbf4804\") " Dec 06 15:45:03 crc kubenswrapper[5003]: I1206 15:45:03.140925 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/368df4f0-f839-4fe1-882f-53504dbf4804-config-volume" (OuterVolumeSpecName: "config-volume") pod "368df4f0-f839-4fe1-882f-53504dbf4804" (UID: "368df4f0-f839-4fe1-882f-53504dbf4804"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:45:03 crc kubenswrapper[5003]: I1206 15:45:03.145121 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/368df4f0-f839-4fe1-882f-53504dbf4804-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "368df4f0-f839-4fe1-882f-53504dbf4804" (UID: "368df4f0-f839-4fe1-882f-53504dbf4804"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:45:03 crc kubenswrapper[5003]: I1206 15:45:03.146067 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/368df4f0-f839-4fe1-882f-53504dbf4804-kube-api-access-25fd6" (OuterVolumeSpecName: "kube-api-access-25fd6") pod "368df4f0-f839-4fe1-882f-53504dbf4804" (UID: "368df4f0-f839-4fe1-882f-53504dbf4804"). InnerVolumeSpecName "kube-api-access-25fd6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:45:03 crc kubenswrapper[5003]: I1206 15:45:03.241384 5003 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/368df4f0-f839-4fe1-882f-53504dbf4804-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 06 15:45:03 crc kubenswrapper[5003]: I1206 15:45:03.241435 5003 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/368df4f0-f839-4fe1-882f-53504dbf4804-config-volume\") on node \"crc\" DevicePath \"\"" Dec 06 15:45:03 crc kubenswrapper[5003]: I1206 15:45:03.241445 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25fd6\" (UniqueName: \"kubernetes.io/projected/368df4f0-f839-4fe1-882f-53504dbf4804-kube-api-access-25fd6\") on node \"crc\" DevicePath \"\"" Dec 06 15:45:03 crc kubenswrapper[5003]: I1206 15:45:03.745542 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29417265-xmlz5" event={"ID":"368df4f0-f839-4fe1-882f-53504dbf4804","Type":"ContainerDied","Data":"fe8557adf09e8f72d8bc7a6ae6c690d96fc794ba97edfa2a28cb9bd3b1d52fff"} Dec 06 15:45:03 crc kubenswrapper[5003]: I1206 15:45:03.745617 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fe8557adf09e8f72d8bc7a6ae6c690d96fc794ba97edfa2a28cb9bd3b1d52fff" Dec 06 15:45:03 crc kubenswrapper[5003]: I1206 15:45:03.745620 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29417265-xmlz5" Dec 06 15:45:18 crc kubenswrapper[5003]: I1206 15:45:18.572767 5003 patch_prober.go:28] interesting pod/machine-config-daemon-w25db container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 15:45:18 crc kubenswrapper[5003]: I1206 15:45:18.573382 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 15:45:18 crc kubenswrapper[5003]: I1206 15:45:18.573429 5003 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w25db" Dec 06 15:45:18 crc kubenswrapper[5003]: I1206 15:45:18.574033 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"be0c79caa5d9dd2c20871e4f314e65be4eca0cd4c85743186922c15f8ac77ad4"} pod="openshift-machine-config-operator/machine-config-daemon-w25db" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 06 15:45:18 crc kubenswrapper[5003]: I1206 15:45:18.574101 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" containerName="machine-config-daemon" containerID="cri-o://be0c79caa5d9dd2c20871e4f314e65be4eca0cd4c85743186922c15f8ac77ad4" gracePeriod=600 Dec 06 15:45:18 crc kubenswrapper[5003]: I1206 15:45:18.821668 5003 generic.go:334] "Generic (PLEG): container finished" 
podID="1a047c4d-003e-4668-9b96-945eab34ab68" containerID="be0c79caa5d9dd2c20871e4f314e65be4eca0cd4c85743186922c15f8ac77ad4" exitCode=0 Dec 06 15:45:18 crc kubenswrapper[5003]: I1206 15:45:18.821733 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" event={"ID":"1a047c4d-003e-4668-9b96-945eab34ab68","Type":"ContainerDied","Data":"be0c79caa5d9dd2c20871e4f314e65be4eca0cd4c85743186922c15f8ac77ad4"} Dec 06 15:45:18 crc kubenswrapper[5003]: I1206 15:45:18.822011 5003 scope.go:117] "RemoveContainer" containerID="2884742c76d7cbd408daae95be703af27258f3a02a099bf1533eb0a640ac725d" Dec 06 15:45:19 crc kubenswrapper[5003]: I1206 15:45:19.831563 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" event={"ID":"1a047c4d-003e-4668-9b96-945eab34ab68","Type":"ContainerStarted","Data":"bf4c0e939e0839bd8579c450bf673f46cb54e6312b28fa28edd3fa3c1fe6713b"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.400871 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-p7xwd"] Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.402061 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="ovn-controller" containerID="cri-o://276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9" gracePeriod=30 Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.402128 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="nbdb" containerID="cri-o://5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6" gracePeriod=30 Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.402187 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="northd" containerID="cri-o://e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e" gracePeriod=30 Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.402238 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2" gracePeriod=30 Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.402280 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="kube-rbac-proxy-node" containerID="cri-o://b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6" gracePeriod=30 Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.402330 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="ovn-acl-logging" containerID="cri-o://b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119" gracePeriod=30 Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.402592 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" 
podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="sbdb" containerID="cri-o://c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6" gracePeriod=30 Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.440355 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="ovnkube-controller" containerID="cri-o://6a2972bc7291d2adcafac608a303008342fc7672795ad47464e77f3413f5822b" gracePeriod=30 Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.747804 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7xwd_8a695d94-271c-45bc-8a89-dfdecb57ec00/ovnkube-controller/3.log" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.750253 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7xwd_8a695d94-271c-45bc-8a89-dfdecb57ec00/ovn-acl-logging/0.log" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.751115 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7xwd_8a695d94-271c-45bc-8a89-dfdecb57ec00/ovn-controller/0.log" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.751556 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.803307 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-cdmjt"] Dec 06 15:45:23 crc kubenswrapper[5003]: E1206 15:45:23.803573 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="nbdb" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.803591 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="nbdb" Dec 06 15:45:23 crc kubenswrapper[5003]: E1206 15:45:23.803605 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="kube-rbac-proxy-node" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.803612 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="kube-rbac-proxy-node" Dec 06 15:45:23 crc kubenswrapper[5003]: E1206 15:45:23.803619 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="ovnkube-controller" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.803625 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="ovnkube-controller" Dec 06 15:45:23 crc kubenswrapper[5003]: E1206 15:45:23.803632 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="sbdb" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.803637 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="sbdb" Dec 06 15:45:23 crc kubenswrapper[5003]: E1206 15:45:23.803646 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="northd" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.803655 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="northd" Dec 06 15:45:23 crc 
kubenswrapper[5003]: E1206 15:45:23.803670 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="368df4f0-f839-4fe1-882f-53504dbf4804" containerName="collect-profiles" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.803690 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="368df4f0-f839-4fe1-882f-53504dbf4804" containerName="collect-profiles" Dec 06 15:45:23 crc kubenswrapper[5003]: E1206 15:45:23.803704 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="ovn-controller" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.803710 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="ovn-controller" Dec 06 15:45:23 crc kubenswrapper[5003]: E1206 15:45:23.803717 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="ovnkube-controller" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.803723 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="ovnkube-controller" Dec 06 15:45:23 crc kubenswrapper[5003]: E1206 15:45:23.803733 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="kubecfg-setup" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.803739 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="kubecfg-setup" Dec 06 15:45:23 crc kubenswrapper[5003]: E1206 15:45:23.803745 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="ovn-acl-logging" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.803751 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="ovn-acl-logging" Dec 06 15:45:23 crc kubenswrapper[5003]: E1206 15:45:23.803758 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="kube-rbac-proxy-ovn-metrics" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.803768 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="kube-rbac-proxy-ovn-metrics" Dec 06 15:45:23 crc kubenswrapper[5003]: E1206 15:45:23.803782 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="ovnkube-controller" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.803789 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="ovnkube-controller" Dec 06 15:45:23 crc kubenswrapper[5003]: E1206 15:45:23.803797 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="ovnkube-controller" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.803803 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="ovnkube-controller" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.803916 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="ovnkube-controller" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.803931 5003 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="ovn-controller" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.803943 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="ovnkube-controller" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.803954 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="kube-rbac-proxy-node" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.803963 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="northd" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.803970 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="kube-rbac-proxy-ovn-metrics" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.803977 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="ovnkube-controller" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.803987 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="sbdb" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.803997 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="ovn-acl-logging" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.804005 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="ovnkube-controller" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.804015 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="368df4f0-f839-4fe1-882f-53504dbf4804" containerName="collect-profiles" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.804022 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="nbdb" Dec 06 15:45:23 crc kubenswrapper[5003]: E1206 15:45:23.804121 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="ovnkube-controller" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.804131 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="ovnkube-controller" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.804254 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerName="ovnkube-controller" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.806114 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.853665 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7xwd_8a695d94-271c-45bc-8a89-dfdecb57ec00/ovnkube-controller/3.log" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.855656 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7xwd_8a695d94-271c-45bc-8a89-dfdecb57ec00/ovn-acl-logging/0.log" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.856199 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7xwd_8a695d94-271c-45bc-8a89-dfdecb57ec00/ovn-controller/0.log" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.856558 5003 generic.go:334] "Generic (PLEG): container finished" podID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerID="6a2972bc7291d2adcafac608a303008342fc7672795ad47464e77f3413f5822b" exitCode=0 Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.856583 5003 generic.go:334] "Generic (PLEG): container finished" podID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerID="c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6" exitCode=0 Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.856592 5003 generic.go:334] "Generic (PLEG): container finished" podID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerID="5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6" exitCode=0 Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.856600 5003 generic.go:334] "Generic (PLEG): container finished" podID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerID="e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e" exitCode=0 Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.856608 5003 generic.go:334] "Generic (PLEG): container finished" podID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerID="b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2" exitCode=0 Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.856618 5003 generic.go:334] "Generic (PLEG): container finished" podID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerID="b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6" exitCode=0 Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.856626 5003 generic.go:334] "Generic (PLEG): container finished" podID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerID="b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119" exitCode=143 Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.856631 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.856647 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" event={"ID":"8a695d94-271c-45bc-8a89-dfdecb57ec00","Type":"ContainerDied","Data":"6a2972bc7291d2adcafac608a303008342fc7672795ad47464e77f3413f5822b"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.856674 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" event={"ID":"8a695d94-271c-45bc-8a89-dfdecb57ec00","Type":"ContainerDied","Data":"c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.856687 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" event={"ID":"8a695d94-271c-45bc-8a89-dfdecb57ec00","Type":"ContainerDied","Data":"5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.856699 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" event={"ID":"8a695d94-271c-45bc-8a89-dfdecb57ec00","Type":"ContainerDied","Data":"e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.856711 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" event={"ID":"8a695d94-271c-45bc-8a89-dfdecb57ec00","Type":"ContainerDied","Data":"b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.856736 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" event={"ID":"8a695d94-271c-45bc-8a89-dfdecb57ec00","Type":"ContainerDied","Data":"b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.856750 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.856762 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.856769 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.856775 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.856782 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.856790 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.856797 5003 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.856804 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.856811 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.856813 5003 scope.go:117] "RemoveContainer" containerID="6a2972bc7291d2adcafac608a303008342fc7672795ad47464e77f3413f5822b" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.856819 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" event={"ID":"8a695d94-271c-45bc-8a89-dfdecb57ec00","Type":"ContainerDied","Data":"b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.856975 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6a2972bc7291d2adcafac608a303008342fc7672795ad47464e77f3413f5822b"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.856985 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.856992 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.856998 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.857003 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.857008 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.857013 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.857018 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.857023 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.857028 5003 pod_container_deletor.go:114] "Failed to issue the request to remove 
container" containerID={"Type":"cri-o","ID":"9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.857037 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" event={"ID":"8a695d94-271c-45bc-8a89-dfdecb57ec00","Type":"ContainerDied","Data":"276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.857046 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6a2972bc7291d2adcafac608a303008342fc7672795ad47464e77f3413f5822b"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.857053 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.857059 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.857064 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.857069 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.857073 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.857079 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.857084 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.857088 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.857094 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.856635 5003 generic.go:334] "Generic (PLEG): container finished" podID="8a695d94-271c-45bc-8a89-dfdecb57ec00" containerID="276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9" exitCode=143 Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.857137 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7xwd" event={"ID":"8a695d94-271c-45bc-8a89-dfdecb57ec00","Type":"ContainerDied","Data":"aabc580923acc64fe2a5e1d57e615c5b8f2423b6a65be317d0e9d742a1cbe295"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 
15:45:23.857146 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6a2972bc7291d2adcafac608a303008342fc7672795ad47464e77f3413f5822b"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.857151 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.857156 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.857161 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.857166 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.857171 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.857176 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.857182 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.857188 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.857193 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.859345 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-9kdpn_350e8b9a-b7bf-4dc9-abe9-d10f7a088be3/kube-multus/2.log" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.859773 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-9kdpn_350e8b9a-b7bf-4dc9-abe9-d10f7a088be3/kube-multus/1.log" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.859805 5003 generic.go:334] "Generic (PLEG): container finished" podID="350e8b9a-b7bf-4dc9-abe9-d10f7a088be3" containerID="354d0d34ffc16523138a9bc18e8f39bb69a613fb17554414555a0bd218e7a17d" exitCode=2 Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.859820 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-9kdpn" event={"ID":"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3","Type":"ContainerDied","Data":"354d0d34ffc16523138a9bc18e8f39bb69a613fb17554414555a0bd218e7a17d"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.859832 5003 pod_container_deletor.go:114] "Failed to issue the request 
to remove container" containerID={"Type":"cri-o","ID":"22d16feb3425c5cac7562c4468723b0aae567d2d31db5516b3b0ce7d38d91c6b"} Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.860144 5003 scope.go:117] "RemoveContainer" containerID="354d0d34ffc16523138a9bc18e8f39bb69a613fb17554414555a0bd218e7a17d" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.872113 5003 scope.go:117] "RemoveContainer" containerID="9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.893369 5003 scope.go:117] "RemoveContainer" containerID="c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.907261 5003 scope.go:117] "RemoveContainer" containerID="5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.917560 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-var-lib-cni-networks-ovn-kubernetes\") pod \"8a695d94-271c-45bc-8a89-dfdecb57ec00\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.917600 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-node-log\") pod \"8a695d94-271c-45bc-8a89-dfdecb57ec00\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.917623 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-kubelet\") pod \"8a695d94-271c-45bc-8a89-dfdecb57ec00\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.917690 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8a695d94-271c-45bc-8a89-dfdecb57ec00-env-overrides\") pod \"8a695d94-271c-45bc-8a89-dfdecb57ec00\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.917696 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "8a695d94-271c-45bc-8a89-dfdecb57ec00" (UID: "8a695d94-271c-45bc-8a89-dfdecb57ec00"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.917724 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k5gxl\" (UniqueName: \"kubernetes.io/projected/8a695d94-271c-45bc-8a89-dfdecb57ec00-kube-api-access-k5gxl\") pod \"8a695d94-271c-45bc-8a89-dfdecb57ec00\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.917743 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "8a695d94-271c-45bc-8a89-dfdecb57ec00" (UID: "8a695d94-271c-45bc-8a89-dfdecb57ec00"). 
InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.917748 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-run-netns\") pod \"8a695d94-271c-45bc-8a89-dfdecb57ec00\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.917750 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-node-log" (OuterVolumeSpecName: "node-log") pod "8a695d94-271c-45bc-8a89-dfdecb57ec00" (UID: "8a695d94-271c-45bc-8a89-dfdecb57ec00"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.917819 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-run-ovn\") pod \"8a695d94-271c-45bc-8a89-dfdecb57ec00\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.917800 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "8a695d94-271c-45bc-8a89-dfdecb57ec00" (UID: "8a695d94-271c-45bc-8a89-dfdecb57ec00"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.917855 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8a695d94-271c-45bc-8a89-dfdecb57ec00-ovn-node-metrics-cert\") pod \"8a695d94-271c-45bc-8a89-dfdecb57ec00\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.917890 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8a695d94-271c-45bc-8a89-dfdecb57ec00-ovnkube-script-lib\") pod \"8a695d94-271c-45bc-8a89-dfdecb57ec00\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.917923 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-etc-openvswitch\") pod \"8a695d94-271c-45bc-8a89-dfdecb57ec00\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.917946 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-run-ovn-kubernetes\") pod \"8a695d94-271c-45bc-8a89-dfdecb57ec00\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.917972 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8a695d94-271c-45bc-8a89-dfdecb57ec00-ovnkube-config\") pod \"8a695d94-271c-45bc-8a89-dfdecb57ec00\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.918010 5003 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-run-openvswitch\") pod \"8a695d94-271c-45bc-8a89-dfdecb57ec00\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.918033 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-slash\") pod \"8a695d94-271c-45bc-8a89-dfdecb57ec00\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.918054 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-cni-netd\") pod \"8a695d94-271c-45bc-8a89-dfdecb57ec00\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.918074 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-systemd-units\") pod \"8a695d94-271c-45bc-8a89-dfdecb57ec00\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.918097 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-cni-bin\") pod \"8a695d94-271c-45bc-8a89-dfdecb57ec00\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.918124 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-run-systemd\") pod \"8a695d94-271c-45bc-8a89-dfdecb57ec00\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.918145 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-log-socket\") pod \"8a695d94-271c-45bc-8a89-dfdecb57ec00\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.918179 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-var-lib-openvswitch\") pod \"8a695d94-271c-45bc-8a89-dfdecb57ec00\" (UID: \"8a695d94-271c-45bc-8a89-dfdecb57ec00\") " Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.918251 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a695d94-271c-45bc-8a89-dfdecb57ec00-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "8a695d94-271c-45bc-8a89-dfdecb57ec00" (UID: "8a695d94-271c-45bc-8a89-dfdecb57ec00"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.918286 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "8a695d94-271c-45bc-8a89-dfdecb57ec00" (UID: "8a695d94-271c-45bc-8a89-dfdecb57ec00"). InnerVolumeSpecName "run-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.918307 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "8a695d94-271c-45bc-8a89-dfdecb57ec00" (UID: "8a695d94-271c-45bc-8a89-dfdecb57ec00"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.918330 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-host-cni-bin\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.918373 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-run-systemd\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.918408 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-log-socket\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.918441 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-host-run-ovn-kubernetes\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.918470 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-host-kubelet\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.918509 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-etc-openvswitch\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.918535 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-systemd-units\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.918553 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/17f36225-1711-4e80-abb4-e9afbc3e00e0-ovn-node-metrics-cert\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.918620 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.918641 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/17f36225-1711-4e80-abb4-e9afbc3e00e0-ovnkube-config\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.918668 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/17f36225-1711-4e80-abb4-e9afbc3e00e0-env-overrides\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.918687 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/17f36225-1711-4e80-abb4-e9afbc3e00e0-ovnkube-script-lib\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.918709 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-run-openvswitch\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.918730 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drqw8\" (UniqueName: \"kubernetes.io/projected/17f36225-1711-4e80-abb4-e9afbc3e00e0-kube-api-access-drqw8\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.918766 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-host-slash\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.918802 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-var-lib-openvswitch\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:23 crc kubenswrapper[5003]: 
I1206 15:45:23.918824 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-host-run-netns\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.918844 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-host-cni-netd\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.918878 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-run-ovn\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.918903 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-node-log\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.918944 5003 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.918959 5003 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-node-log\") on node \"crc\" DevicePath \"\"" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.918970 5003 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.918982 5003 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8a695d94-271c-45bc-8a89-dfdecb57ec00-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.918993 5003 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.919003 5003 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.919013 5003 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.919055 5003 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-slash" (OuterVolumeSpecName: "host-slash") pod "8a695d94-271c-45bc-8a89-dfdecb57ec00" (UID: "8a695d94-271c-45bc-8a89-dfdecb57ec00"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.919077 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "8a695d94-271c-45bc-8a89-dfdecb57ec00" (UID: "8a695d94-271c-45bc-8a89-dfdecb57ec00"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.919097 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "8a695d94-271c-45bc-8a89-dfdecb57ec00" (UID: "8a695d94-271c-45bc-8a89-dfdecb57ec00"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.919116 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "8a695d94-271c-45bc-8a89-dfdecb57ec00" (UID: "8a695d94-271c-45bc-8a89-dfdecb57ec00"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.919418 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-log-socket" (OuterVolumeSpecName: "log-socket") pod "8a695d94-271c-45bc-8a89-dfdecb57ec00" (UID: "8a695d94-271c-45bc-8a89-dfdecb57ec00"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.919460 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "8a695d94-271c-45bc-8a89-dfdecb57ec00" (UID: "8a695d94-271c-45bc-8a89-dfdecb57ec00"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.919582 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "8a695d94-271c-45bc-8a89-dfdecb57ec00" (UID: "8a695d94-271c-45bc-8a89-dfdecb57ec00"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.919619 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "8a695d94-271c-45bc-8a89-dfdecb57ec00" (UID: "8a695d94-271c-45bc-8a89-dfdecb57ec00"). InnerVolumeSpecName "etc-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.919797 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a695d94-271c-45bc-8a89-dfdecb57ec00-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "8a695d94-271c-45bc-8a89-dfdecb57ec00" (UID: "8a695d94-271c-45bc-8a89-dfdecb57ec00"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.920036 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a695d94-271c-45bc-8a89-dfdecb57ec00-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "8a695d94-271c-45bc-8a89-dfdecb57ec00" (UID: "8a695d94-271c-45bc-8a89-dfdecb57ec00"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.923966 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a695d94-271c-45bc-8a89-dfdecb57ec00-kube-api-access-k5gxl" (OuterVolumeSpecName: "kube-api-access-k5gxl") pod "8a695d94-271c-45bc-8a89-dfdecb57ec00" (UID: "8a695d94-271c-45bc-8a89-dfdecb57ec00"). InnerVolumeSpecName "kube-api-access-k5gxl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.924338 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a695d94-271c-45bc-8a89-dfdecb57ec00-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "8a695d94-271c-45bc-8a89-dfdecb57ec00" (UID: "8a695d94-271c-45bc-8a89-dfdecb57ec00"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.945037 5003 scope.go:117] "RemoveContainer" containerID="e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.945932 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "8a695d94-271c-45bc-8a89-dfdecb57ec00" (UID: "8a695d94-271c-45bc-8a89-dfdecb57ec00"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.956400 5003 scope.go:117] "RemoveContainer" containerID="b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.968512 5003 scope.go:117] "RemoveContainer" containerID="b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6" Dec 06 15:45:23 crc kubenswrapper[5003]: I1206 15:45:23.987627 5003 scope.go:117] "RemoveContainer" containerID="b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.004255 5003 scope.go:117] "RemoveContainer" containerID="276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.020274 5003 scope.go:117] "RemoveContainer" containerID="9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.020983 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-var-lib-openvswitch\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021033 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-host-run-netns\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021059 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-host-cni-netd\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021090 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-run-ovn\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021118 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-node-log\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021139 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-host-cni-bin\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021163 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-run-systemd\") pod \"ovnkube-node-cdmjt\" (UID: 
\"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021186 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-log-socket\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021213 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-host-run-ovn-kubernetes\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021231 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-host-kubelet\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021245 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-etc-openvswitch\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021262 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-systemd-units\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021276 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/17f36225-1711-4e80-abb4-e9afbc3e00e0-ovn-node-metrics-cert\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021291 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021305 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/17f36225-1711-4e80-abb4-e9afbc3e00e0-ovnkube-config\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021332 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/17f36225-1711-4e80-abb4-e9afbc3e00e0-env-overrides\") pod \"ovnkube-node-cdmjt\" (UID: 
\"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021400 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/17f36225-1711-4e80-abb4-e9afbc3e00e0-ovnkube-script-lib\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021419 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-run-openvswitch\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021436 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drqw8\" (UniqueName: \"kubernetes.io/projected/17f36225-1711-4e80-abb4-e9afbc3e00e0-kube-api-access-drqw8\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021509 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-host-slash\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021563 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k5gxl\" (UniqueName: \"kubernetes.io/projected/8a695d94-271c-45bc-8a89-dfdecb57ec00-kube-api-access-k5gxl\") on node \"crc\" DevicePath \"\"" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021575 5003 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8a695d94-271c-45bc-8a89-dfdecb57ec00-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021583 5003 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8a695d94-271c-45bc-8a89-dfdecb57ec00-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021592 5003 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021600 5003 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021609 5003 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8a695d94-271c-45bc-8a89-dfdecb57ec00-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021620 5003 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-slash\") on node \"crc\" 
DevicePath \"\"" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021627 5003 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021636 5003 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021645 5003 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021653 5003 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021661 5003 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-log-socket\") on node \"crc\" DevicePath \"\"" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021670 5003 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8a695d94-271c-45bc-8a89-dfdecb57ec00-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021714 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-host-slash\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021753 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-host-kubelet\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021774 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-etc-openvswitch\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021793 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-systemd-units\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021800 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-host-run-ovn-kubernetes\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 
15:45:24.021877 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-var-lib-openvswitch\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021922 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-host-run-netns\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.021952 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-host-cni-netd\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.022309 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-host-cni-bin\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.022360 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-node-log\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.022383 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-run-ovn\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.022400 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-log-socket\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.022436 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-run-systemd\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.022740 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-run-openvswitch\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.023000 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/17f36225-1711-4e80-abb4-e9afbc3e00e0-env-overrides\") pod 
\"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.023040 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/17f36225-1711-4e80-abb4-e9afbc3e00e0-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.023306 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/17f36225-1711-4e80-abb4-e9afbc3e00e0-ovnkube-script-lib\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.023412 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/17f36225-1711-4e80-abb4-e9afbc3e00e0-ovnkube-config\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.025781 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/17f36225-1711-4e80-abb4-e9afbc3e00e0-ovn-node-metrics-cert\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.037714 5003 scope.go:117] "RemoveContainer" containerID="6a2972bc7291d2adcafac608a303008342fc7672795ad47464e77f3413f5822b" Dec 06 15:45:24 crc kubenswrapper[5003]: E1206 15:45:24.038039 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6a2972bc7291d2adcafac608a303008342fc7672795ad47464e77f3413f5822b\": container with ID starting with 6a2972bc7291d2adcafac608a303008342fc7672795ad47464e77f3413f5822b not found: ID does not exist" containerID="6a2972bc7291d2adcafac608a303008342fc7672795ad47464e77f3413f5822b" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.038072 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a2972bc7291d2adcafac608a303008342fc7672795ad47464e77f3413f5822b"} err="failed to get container status \"6a2972bc7291d2adcafac608a303008342fc7672795ad47464e77f3413f5822b\": rpc error: code = NotFound desc = could not find container \"6a2972bc7291d2adcafac608a303008342fc7672795ad47464e77f3413f5822b\": container with ID starting with 6a2972bc7291d2adcafac608a303008342fc7672795ad47464e77f3413f5822b not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.038093 5003 scope.go:117] "RemoveContainer" containerID="9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22" Dec 06 15:45:24 crc kubenswrapper[5003]: E1206 15:45:24.038356 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22\": container with ID starting with 9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22 not found: ID does not exist" 
containerID="9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.038375 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22"} err="failed to get container status \"9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22\": rpc error: code = NotFound desc = could not find container \"9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22\": container with ID starting with 9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22 not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.038386 5003 scope.go:117] "RemoveContainer" containerID="c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6" Dec 06 15:45:24 crc kubenswrapper[5003]: E1206 15:45:24.038590 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6\": container with ID starting with c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6 not found: ID does not exist" containerID="c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.038608 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6"} err="failed to get container status \"c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6\": rpc error: code = NotFound desc = could not find container \"c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6\": container with ID starting with c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6 not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.038620 5003 scope.go:117] "RemoveContainer" containerID="5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6" Dec 06 15:45:24 crc kubenswrapper[5003]: E1206 15:45:24.038847 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6\": container with ID starting with 5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6 not found: ID does not exist" containerID="5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.038870 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6"} err="failed to get container status \"5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6\": rpc error: code = NotFound desc = could not find container \"5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6\": container with ID starting with 5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6 not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.038885 5003 scope.go:117] "RemoveContainer" containerID="e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e" Dec 06 15:45:24 crc kubenswrapper[5003]: E1206 15:45:24.039042 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e\": container with ID starting with e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e not found: ID does not exist" containerID="e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.039061 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e"} err="failed to get container status \"e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e\": rpc error: code = NotFound desc = could not find container \"e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e\": container with ID starting with e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.039073 5003 scope.go:117] "RemoveContainer" containerID="b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2" Dec 06 15:45:24 crc kubenswrapper[5003]: E1206 15:45:24.039294 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2\": container with ID starting with b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2 not found: ID does not exist" containerID="b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.039316 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2"} err="failed to get container status \"b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2\": rpc error: code = NotFound desc = could not find container \"b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2\": container with ID starting with b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2 not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.039347 5003 scope.go:117] "RemoveContainer" containerID="b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.040409 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drqw8\" (UniqueName: \"kubernetes.io/projected/17f36225-1711-4e80-abb4-e9afbc3e00e0-kube-api-access-drqw8\") pod \"ovnkube-node-cdmjt\" (UID: \"17f36225-1711-4e80-abb4-e9afbc3e00e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: E1206 15:45:24.041428 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6\": container with ID starting with b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6 not found: ID does not exist" containerID="b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.041465 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6"} err="failed to get container status \"b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6\": rpc error: code = NotFound desc = could not find container 
\"b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6\": container with ID starting with b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6 not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.041513 5003 scope.go:117] "RemoveContainer" containerID="b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119" Dec 06 15:45:24 crc kubenswrapper[5003]: E1206 15:45:24.041836 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119\": container with ID starting with b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119 not found: ID does not exist" containerID="b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.041876 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119"} err="failed to get container status \"b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119\": rpc error: code = NotFound desc = could not find container \"b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119\": container with ID starting with b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119 not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.041906 5003 scope.go:117] "RemoveContainer" containerID="276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9" Dec 06 15:45:24 crc kubenswrapper[5003]: E1206 15:45:24.042161 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9\": container with ID starting with 276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9 not found: ID does not exist" containerID="276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.042184 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9"} err="failed to get container status \"276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9\": rpc error: code = NotFound desc = could not find container \"276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9\": container with ID starting with 276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9 not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.042197 5003 scope.go:117] "RemoveContainer" containerID="9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9" Dec 06 15:45:24 crc kubenswrapper[5003]: E1206 15:45:24.042605 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\": container with ID starting with 9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9 not found: ID does not exist" containerID="9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.042627 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9"} 
err="failed to get container status \"9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\": rpc error: code = NotFound desc = could not find container \"9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\": container with ID starting with 9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9 not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.042645 5003 scope.go:117] "RemoveContainer" containerID="6a2972bc7291d2adcafac608a303008342fc7672795ad47464e77f3413f5822b" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.042897 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a2972bc7291d2adcafac608a303008342fc7672795ad47464e77f3413f5822b"} err="failed to get container status \"6a2972bc7291d2adcafac608a303008342fc7672795ad47464e77f3413f5822b\": rpc error: code = NotFound desc = could not find container \"6a2972bc7291d2adcafac608a303008342fc7672795ad47464e77f3413f5822b\": container with ID starting with 6a2972bc7291d2adcafac608a303008342fc7672795ad47464e77f3413f5822b not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.042921 5003 scope.go:117] "RemoveContainer" containerID="9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.043242 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22"} err="failed to get container status \"9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22\": rpc error: code = NotFound desc = could not find container \"9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22\": container with ID starting with 9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22 not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.043259 5003 scope.go:117] "RemoveContainer" containerID="c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.043514 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6"} err="failed to get container status \"c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6\": rpc error: code = NotFound desc = could not find container \"c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6\": container with ID starting with c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6 not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.043532 5003 scope.go:117] "RemoveContainer" containerID="5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.043704 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6"} err="failed to get container status \"5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6\": rpc error: code = NotFound desc = could not find container \"5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6\": container with ID starting with 5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6 not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.043722 5003 scope.go:117] "RemoveContainer" 
containerID="e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.044051 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e"} err="failed to get container status \"e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e\": rpc error: code = NotFound desc = could not find container \"e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e\": container with ID starting with e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.044065 5003 scope.go:117] "RemoveContainer" containerID="b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.044276 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2"} err="failed to get container status \"b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2\": rpc error: code = NotFound desc = could not find container \"b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2\": container with ID starting with b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2 not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.044292 5003 scope.go:117] "RemoveContainer" containerID="b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.044579 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6"} err="failed to get container status \"b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6\": rpc error: code = NotFound desc = could not find container \"b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6\": container with ID starting with b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6 not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.044595 5003 scope.go:117] "RemoveContainer" containerID="b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.044756 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119"} err="failed to get container status \"b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119\": rpc error: code = NotFound desc = could not find container \"b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119\": container with ID starting with b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119 not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.044774 5003 scope.go:117] "RemoveContainer" containerID="276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.045143 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9"} err="failed to get container status \"276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9\": rpc error: code = NotFound desc = could not find 
container \"276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9\": container with ID starting with 276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9 not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.045194 5003 scope.go:117] "RemoveContainer" containerID="9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.045472 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9"} err="failed to get container status \"9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\": rpc error: code = NotFound desc = could not find container \"9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\": container with ID starting with 9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9 not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.045515 5003 scope.go:117] "RemoveContainer" containerID="6a2972bc7291d2adcafac608a303008342fc7672795ad47464e77f3413f5822b" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.045958 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a2972bc7291d2adcafac608a303008342fc7672795ad47464e77f3413f5822b"} err="failed to get container status \"6a2972bc7291d2adcafac608a303008342fc7672795ad47464e77f3413f5822b\": rpc error: code = NotFound desc = could not find container \"6a2972bc7291d2adcafac608a303008342fc7672795ad47464e77f3413f5822b\": container with ID starting with 6a2972bc7291d2adcafac608a303008342fc7672795ad47464e77f3413f5822b not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.045978 5003 scope.go:117] "RemoveContainer" containerID="9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.046193 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22"} err="failed to get container status \"9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22\": rpc error: code = NotFound desc = could not find container \"9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22\": container with ID starting with 9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22 not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.046208 5003 scope.go:117] "RemoveContainer" containerID="c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.046576 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6"} err="failed to get container status \"c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6\": rpc error: code = NotFound desc = could not find container \"c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6\": container with ID starting with c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6 not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.046606 5003 scope.go:117] "RemoveContainer" containerID="5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.046913 5003 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6"} err="failed to get container status \"5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6\": rpc error: code = NotFound desc = could not find container \"5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6\": container with ID starting with 5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6 not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.046958 5003 scope.go:117] "RemoveContainer" containerID="e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.047244 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e"} err="failed to get container status \"e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e\": rpc error: code = NotFound desc = could not find container \"e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e\": container with ID starting with e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.047267 5003 scope.go:117] "RemoveContainer" containerID="b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.047555 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2"} err="failed to get container status \"b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2\": rpc error: code = NotFound desc = could not find container \"b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2\": container with ID starting with b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2 not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.047583 5003 scope.go:117] "RemoveContainer" containerID="b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.047825 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6"} err="failed to get container status \"b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6\": rpc error: code = NotFound desc = could not find container \"b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6\": container with ID starting with b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6 not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.047846 5003 scope.go:117] "RemoveContainer" containerID="b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.048109 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119"} err="failed to get container status \"b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119\": rpc error: code = NotFound desc = could not find container \"b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119\": container with ID starting with 
b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119 not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.048127 5003 scope.go:117] "RemoveContainer" containerID="276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.048369 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9"} err="failed to get container status \"276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9\": rpc error: code = NotFound desc = could not find container \"276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9\": container with ID starting with 276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9 not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.048402 5003 scope.go:117] "RemoveContainer" containerID="9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.048681 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9"} err="failed to get container status \"9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\": rpc error: code = NotFound desc = could not find container \"9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\": container with ID starting with 9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9 not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.048700 5003 scope.go:117] "RemoveContainer" containerID="6a2972bc7291d2adcafac608a303008342fc7672795ad47464e77f3413f5822b" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.048902 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a2972bc7291d2adcafac608a303008342fc7672795ad47464e77f3413f5822b"} err="failed to get container status \"6a2972bc7291d2adcafac608a303008342fc7672795ad47464e77f3413f5822b\": rpc error: code = NotFound desc = could not find container \"6a2972bc7291d2adcafac608a303008342fc7672795ad47464e77f3413f5822b\": container with ID starting with 6a2972bc7291d2adcafac608a303008342fc7672795ad47464e77f3413f5822b not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.048918 5003 scope.go:117] "RemoveContainer" containerID="9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.049650 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22"} err="failed to get container status \"9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22\": rpc error: code = NotFound desc = could not find container \"9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22\": container with ID starting with 9517eb57092dd5943648e1004f42c5da7be2e94b4e0460d9595a63ef5194be22 not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.049668 5003 scope.go:117] "RemoveContainer" containerID="c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.050058 5003 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6"} err="failed to get container status \"c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6\": rpc error: code = NotFound desc = could not find container \"c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6\": container with ID starting with c56c8c7aa570bb81c41923b20b09d6de6b278e56ca466e7e0ee3cecf113c37d6 not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.050075 5003 scope.go:117] "RemoveContainer" containerID="5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.050382 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6"} err="failed to get container status \"5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6\": rpc error: code = NotFound desc = could not find container \"5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6\": container with ID starting with 5240dcf05f3d661b92408f4c39999571d023d9acdeb31d4a4a99f50e31a627b6 not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.050399 5003 scope.go:117] "RemoveContainer" containerID="e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.050653 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e"} err="failed to get container status \"e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e\": rpc error: code = NotFound desc = could not find container \"e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e\": container with ID starting with e240d517f92fb2b8e24bb6f1c1d98869b41adf8bc3ab687a4426840c7010235e not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.050680 5003 scope.go:117] "RemoveContainer" containerID="b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.050906 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2"} err="failed to get container status \"b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2\": rpc error: code = NotFound desc = could not find container \"b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2\": container with ID starting with b549c3d1ba39068a4f1ce344e0807eed3b4e73ee1902d02cebffe005697201d2 not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.050925 5003 scope.go:117] "RemoveContainer" containerID="b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.051103 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6"} err="failed to get container status \"b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6\": rpc error: code = NotFound desc = could not find container \"b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6\": container with ID starting with b26a01a6a0644d88028ebc041e00353091f26407f109e02e4aec1ec0e382a4c6 not found: ID does not exist" Dec 
06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.051119 5003 scope.go:117] "RemoveContainer" containerID="b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.051271 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119"} err="failed to get container status \"b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119\": rpc error: code = NotFound desc = could not find container \"b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119\": container with ID starting with b871ea5e9d2093579af10786c02da9d7f5afa5351933be742b67b1b682733119 not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.051287 5003 scope.go:117] "RemoveContainer" containerID="276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.051451 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9"} err="failed to get container status \"276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9\": rpc error: code = NotFound desc = could not find container \"276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9\": container with ID starting with 276d6d2f680f4e0b2038d12b161e3fd3f85417b5d776b9661559b4812ead1dd9 not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.051466 5003 scope.go:117] "RemoveContainer" containerID="9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.051656 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9"} err="failed to get container status \"9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\": rpc error: code = NotFound desc = could not find container \"9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9\": container with ID starting with 9a00902f2c2c7ca56d86553f78094f40f59ea9ef125f5a388aafd6d7fd0c20d9 not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.051672 5003 scope.go:117] "RemoveContainer" containerID="6a2972bc7291d2adcafac608a303008342fc7672795ad47464e77f3413f5822b" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.051837 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a2972bc7291d2adcafac608a303008342fc7672795ad47464e77f3413f5822b"} err="failed to get container status \"6a2972bc7291d2adcafac608a303008342fc7672795ad47464e77f3413f5822b\": rpc error: code = NotFound desc = could not find container \"6a2972bc7291d2adcafac608a303008342fc7672795ad47464e77f3413f5822b\": container with ID starting with 6a2972bc7291d2adcafac608a303008342fc7672795ad47464e77f3413f5822b not found: ID does not exist" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.123008 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:24 crc kubenswrapper[5003]: W1206 15:45:24.136691 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod17f36225_1711_4e80_abb4_e9afbc3e00e0.slice/crio-2d9261e31523de57acd7ebf60a411d506129713d335911a4690d01029df31c10 WatchSource:0}: Error finding container 2d9261e31523de57acd7ebf60a411d506129713d335911a4690d01029df31c10: Status 404 returned error can't find the container with id 2d9261e31523de57acd7ebf60a411d506129713d335911a4690d01029df31c10 Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.197728 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-p7xwd"] Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.205236 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-p7xwd"] Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.868569 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-9kdpn_350e8b9a-b7bf-4dc9-abe9-d10f7a088be3/kube-multus/2.log" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.870207 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-9kdpn_350e8b9a-b7bf-4dc9-abe9-d10f7a088be3/kube-multus/1.log" Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.870307 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-9kdpn" event={"ID":"350e8b9a-b7bf-4dc9-abe9-d10f7a088be3","Type":"ContainerStarted","Data":"5497f3e87d6810fac5b8e59e641842db493ede974a0c9c98aecfa5d5c60c38f6"} Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.873947 5003 generic.go:334] "Generic (PLEG): container finished" podID="17f36225-1711-4e80-abb4-e9afbc3e00e0" containerID="7a07ed696e2794d19df1f7567c8cdf77cd8aea8b39292b0a5c36948406dd2193" exitCode=0 Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.873993 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" event={"ID":"17f36225-1711-4e80-abb4-e9afbc3e00e0","Type":"ContainerDied","Data":"7a07ed696e2794d19df1f7567c8cdf77cd8aea8b39292b0a5c36948406dd2193"} Dec 06 15:45:24 crc kubenswrapper[5003]: I1206 15:45:24.874022 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" event={"ID":"17f36225-1711-4e80-abb4-e9afbc3e00e0","Type":"ContainerStarted","Data":"2d9261e31523de57acd7ebf60a411d506129713d335911a4690d01029df31c10"} Dec 06 15:45:25 crc kubenswrapper[5003]: I1206 15:45:25.718349 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a695d94-271c-45bc-8a89-dfdecb57ec00" path="/var/lib/kubelet/pods/8a695d94-271c-45bc-8a89-dfdecb57ec00/volumes" Dec 06 15:45:25 crc kubenswrapper[5003]: I1206 15:45:25.881888 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" event={"ID":"17f36225-1711-4e80-abb4-e9afbc3e00e0","Type":"ContainerStarted","Data":"b53cdfc4c00abfd28388c1d38af1be06cb3f0d14b793e03694d0ebbdc652ffb2"} Dec 06 15:45:25 crc kubenswrapper[5003]: I1206 15:45:25.881932 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" event={"ID":"17f36225-1711-4e80-abb4-e9afbc3e00e0","Type":"ContainerStarted","Data":"c4551a6953c494556aa59dddda2f86e6a09603829c249e2c39ebd98e05e7bfb4"} Dec 06 15:45:25 crc kubenswrapper[5003]: I1206 15:45:25.881943 5003 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" event={"ID":"17f36225-1711-4e80-abb4-e9afbc3e00e0","Type":"ContainerStarted","Data":"2a64c230b72d644d160133a881b5ec9e629204168e69a75eb89391f8c5e909a5"} Dec 06 15:45:25 crc kubenswrapper[5003]: I1206 15:45:25.881954 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" event={"ID":"17f36225-1711-4e80-abb4-e9afbc3e00e0","Type":"ContainerStarted","Data":"7d529b73da07fd8dc155c72fedfeb2100a5ec54d9ee9999ef7b0f4fbbe8069bb"} Dec 06 15:45:25 crc kubenswrapper[5003]: I1206 15:45:25.881966 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" event={"ID":"17f36225-1711-4e80-abb4-e9afbc3e00e0","Type":"ContainerStarted","Data":"45d1873bd2d7766bf6c185d75f636aad0a6b68c0fecaea8df6cfef9e685cd3ae"} Dec 06 15:45:25 crc kubenswrapper[5003]: I1206 15:45:25.881980 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" event={"ID":"17f36225-1711-4e80-abb4-e9afbc3e00e0","Type":"ContainerStarted","Data":"df1823c30f50b3f98ef8ba39194831e991cca78a4380a09b68d574d2a8234a53"} Dec 06 15:45:27 crc kubenswrapper[5003]: I1206 15:45:27.899192 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" event={"ID":"17f36225-1711-4e80-abb4-e9afbc3e00e0","Type":"ContainerStarted","Data":"c7a5c17d6b5a80bd9a4c8dcf46235c13f5712dd94e047850061ad4d982dc157a"} Dec 06 15:45:31 crc kubenswrapper[5003]: I1206 15:45:31.929040 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" event={"ID":"17f36225-1711-4e80-abb4-e9afbc3e00e0","Type":"ContainerStarted","Data":"eca5e66ff95b36e185dceab69a1eaf417bebbd92ce98aa9f28f85eb3c43d6901"} Dec 06 15:45:31 crc kubenswrapper[5003]: I1206 15:45:31.929636 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:31 crc kubenswrapper[5003]: I1206 15:45:31.929725 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:31 crc kubenswrapper[5003]: I1206 15:45:31.929778 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:31 crc kubenswrapper[5003]: I1206 15:45:31.956374 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:31 crc kubenswrapper[5003]: I1206 15:45:31.958021 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:31 crc kubenswrapper[5003]: I1206 15:45:31.962395 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" podStartSLOduration=8.962380274000001 podStartE2EDuration="8.962380274s" podCreationTimestamp="2025-12-06 15:45:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:45:31.957074388 +0000 UTC m=+810.490428789" watchObservedRunningTime="2025-12-06 15:45:31.962380274 +0000 UTC m=+810.495734655" Dec 06 15:45:49 crc kubenswrapper[5003]: I1206 15:45:49.846101 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn"] Dec 06 
15:45:49 crc kubenswrapper[5003]: I1206 15:45:49.847677 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn" Dec 06 15:45:49 crc kubenswrapper[5003]: I1206 15:45:49.849605 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 06 15:45:49 crc kubenswrapper[5003]: I1206 15:45:49.859834 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn"] Dec 06 15:45:49 crc kubenswrapper[5003]: I1206 15:45:49.979525 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ccb39b5c-9a0b-4ce9-a83c-a41fda667b92-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn\" (UID: \"ccb39b5c-9a0b-4ce9-a83c-a41fda667b92\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn" Dec 06 15:45:49 crc kubenswrapper[5003]: I1206 15:45:49.979852 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9z74j\" (UniqueName: \"kubernetes.io/projected/ccb39b5c-9a0b-4ce9-a83c-a41fda667b92-kube-api-access-9z74j\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn\" (UID: \"ccb39b5c-9a0b-4ce9-a83c-a41fda667b92\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn" Dec 06 15:45:49 crc kubenswrapper[5003]: I1206 15:45:49.979887 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ccb39b5c-9a0b-4ce9-a83c-a41fda667b92-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn\" (UID: \"ccb39b5c-9a0b-4ce9-a83c-a41fda667b92\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn" Dec 06 15:45:50 crc kubenswrapper[5003]: I1206 15:45:50.081063 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ccb39b5c-9a0b-4ce9-a83c-a41fda667b92-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn\" (UID: \"ccb39b5c-9a0b-4ce9-a83c-a41fda667b92\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn" Dec 06 15:45:50 crc kubenswrapper[5003]: I1206 15:45:50.081187 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ccb39b5c-9a0b-4ce9-a83c-a41fda667b92-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn\" (UID: \"ccb39b5c-9a0b-4ce9-a83c-a41fda667b92\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn" Dec 06 15:45:50 crc kubenswrapper[5003]: I1206 15:45:50.081242 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9z74j\" (UniqueName: \"kubernetes.io/projected/ccb39b5c-9a0b-4ce9-a83c-a41fda667b92-kube-api-access-9z74j\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn\" (UID: \"ccb39b5c-9a0b-4ce9-a83c-a41fda667b92\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn" Dec 06 15:45:50 crc kubenswrapper[5003]: I1206 15:45:50.082355 5003 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ccb39b5c-9a0b-4ce9-a83c-a41fda667b92-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn\" (UID: \"ccb39b5c-9a0b-4ce9-a83c-a41fda667b92\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn" Dec 06 15:45:50 crc kubenswrapper[5003]: I1206 15:45:50.082696 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ccb39b5c-9a0b-4ce9-a83c-a41fda667b92-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn\" (UID: \"ccb39b5c-9a0b-4ce9-a83c-a41fda667b92\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn" Dec 06 15:45:50 crc kubenswrapper[5003]: I1206 15:45:50.108773 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9z74j\" (UniqueName: \"kubernetes.io/projected/ccb39b5c-9a0b-4ce9-a83c-a41fda667b92-kube-api-access-9z74j\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn\" (UID: \"ccb39b5c-9a0b-4ce9-a83c-a41fda667b92\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn" Dec 06 15:45:50 crc kubenswrapper[5003]: I1206 15:45:50.160536 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn" Dec 06 15:45:50 crc kubenswrapper[5003]: I1206 15:45:50.363945 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn"] Dec 06 15:45:50 crc kubenswrapper[5003]: W1206 15:45:50.371073 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podccb39b5c_9a0b_4ce9_a83c_a41fda667b92.slice/crio-d47b5550bda04d6c7efc864561de6262a1db0df93f4d81e9c689a71db194cae9 WatchSource:0}: Error finding container d47b5550bda04d6c7efc864561de6262a1db0df93f4d81e9c689a71db194cae9: Status 404 returned error can't find the container with id d47b5550bda04d6c7efc864561de6262a1db0df93f4d81e9c689a71db194cae9 Dec 06 15:45:51 crc kubenswrapper[5003]: I1206 15:45:51.039085 5003 generic.go:334] "Generic (PLEG): container finished" podID="ccb39b5c-9a0b-4ce9-a83c-a41fda667b92" containerID="a53dd0e1ea029e3a4ef35189f82df7aee9368defc2367c998079d8782f460abd" exitCode=0 Dec 06 15:45:51 crc kubenswrapper[5003]: I1206 15:45:51.039136 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn" event={"ID":"ccb39b5c-9a0b-4ce9-a83c-a41fda667b92","Type":"ContainerDied","Data":"a53dd0e1ea029e3a4ef35189f82df7aee9368defc2367c998079d8782f460abd"} Dec 06 15:45:51 crc kubenswrapper[5003]: I1206 15:45:51.039169 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn" event={"ID":"ccb39b5c-9a0b-4ce9-a83c-a41fda667b92","Type":"ContainerStarted","Data":"d47b5550bda04d6c7efc864561de6262a1db0df93f4d81e9c689a71db194cae9"} Dec 06 15:45:51 crc kubenswrapper[5003]: I1206 15:45:51.041022 5003 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 06 15:45:52 crc kubenswrapper[5003]: I1206 15:45:52.215970 5003 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/redhat-operators-d2lpv"] Dec 06 15:45:52 crc kubenswrapper[5003]: I1206 15:45:52.217867 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-d2lpv" Dec 06 15:45:52 crc kubenswrapper[5003]: I1206 15:45:52.258123 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-d2lpv"] Dec 06 15:45:52 crc kubenswrapper[5003]: I1206 15:45:52.311436 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qb9wb\" (UniqueName: \"kubernetes.io/projected/d29ab405-2a8c-4783-95f1-f48f4bd04bf2-kube-api-access-qb9wb\") pod \"redhat-operators-d2lpv\" (UID: \"d29ab405-2a8c-4783-95f1-f48f4bd04bf2\") " pod="openshift-marketplace/redhat-operators-d2lpv" Dec 06 15:45:52 crc kubenswrapper[5003]: I1206 15:45:52.311586 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d29ab405-2a8c-4783-95f1-f48f4bd04bf2-utilities\") pod \"redhat-operators-d2lpv\" (UID: \"d29ab405-2a8c-4783-95f1-f48f4bd04bf2\") " pod="openshift-marketplace/redhat-operators-d2lpv" Dec 06 15:45:52 crc kubenswrapper[5003]: I1206 15:45:52.311632 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d29ab405-2a8c-4783-95f1-f48f4bd04bf2-catalog-content\") pod \"redhat-operators-d2lpv\" (UID: \"d29ab405-2a8c-4783-95f1-f48f4bd04bf2\") " pod="openshift-marketplace/redhat-operators-d2lpv" Dec 06 15:45:52 crc kubenswrapper[5003]: I1206 15:45:52.414214 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d29ab405-2a8c-4783-95f1-f48f4bd04bf2-utilities\") pod \"redhat-operators-d2lpv\" (UID: \"d29ab405-2a8c-4783-95f1-f48f4bd04bf2\") " pod="openshift-marketplace/redhat-operators-d2lpv" Dec 06 15:45:52 crc kubenswrapper[5003]: I1206 15:45:52.414285 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d29ab405-2a8c-4783-95f1-f48f4bd04bf2-catalog-content\") pod \"redhat-operators-d2lpv\" (UID: \"d29ab405-2a8c-4783-95f1-f48f4bd04bf2\") " pod="openshift-marketplace/redhat-operators-d2lpv" Dec 06 15:45:52 crc kubenswrapper[5003]: I1206 15:45:52.414330 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qb9wb\" (UniqueName: \"kubernetes.io/projected/d29ab405-2a8c-4783-95f1-f48f4bd04bf2-kube-api-access-qb9wb\") pod \"redhat-operators-d2lpv\" (UID: \"d29ab405-2a8c-4783-95f1-f48f4bd04bf2\") " pod="openshift-marketplace/redhat-operators-d2lpv" Dec 06 15:45:52 crc kubenswrapper[5003]: I1206 15:45:52.415106 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d29ab405-2a8c-4783-95f1-f48f4bd04bf2-utilities\") pod \"redhat-operators-d2lpv\" (UID: \"d29ab405-2a8c-4783-95f1-f48f4bd04bf2\") " pod="openshift-marketplace/redhat-operators-d2lpv" Dec 06 15:45:52 crc kubenswrapper[5003]: I1206 15:45:52.415324 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d29ab405-2a8c-4783-95f1-f48f4bd04bf2-catalog-content\") pod \"redhat-operators-d2lpv\" (UID: \"d29ab405-2a8c-4783-95f1-f48f4bd04bf2\") " 
pod="openshift-marketplace/redhat-operators-d2lpv" Dec 06 15:45:52 crc kubenswrapper[5003]: I1206 15:45:52.434230 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qb9wb\" (UniqueName: \"kubernetes.io/projected/d29ab405-2a8c-4783-95f1-f48f4bd04bf2-kube-api-access-qb9wb\") pod \"redhat-operators-d2lpv\" (UID: \"d29ab405-2a8c-4783-95f1-f48f4bd04bf2\") " pod="openshift-marketplace/redhat-operators-d2lpv" Dec 06 15:45:52 crc kubenswrapper[5003]: I1206 15:45:52.559059 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-d2lpv" Dec 06 15:45:52 crc kubenswrapper[5003]: I1206 15:45:52.744335 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-d2lpv"] Dec 06 15:45:52 crc kubenswrapper[5003]: W1206 15:45:52.753563 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd29ab405_2a8c_4783_95f1_f48f4bd04bf2.slice/crio-ffcb4244fef9d7d32c32b4e9bebc5bc0309d0aea93306b93c110e2fda846a240 WatchSource:0}: Error finding container ffcb4244fef9d7d32c32b4e9bebc5bc0309d0aea93306b93c110e2fda846a240: Status 404 returned error can't find the container with id ffcb4244fef9d7d32c32b4e9bebc5bc0309d0aea93306b93c110e2fda846a240 Dec 06 15:45:53 crc kubenswrapper[5003]: I1206 15:45:53.053397 5003 generic.go:334] "Generic (PLEG): container finished" podID="ccb39b5c-9a0b-4ce9-a83c-a41fda667b92" containerID="614d30d0e2815bfea9409903fbf4562b8d3a51c8e11f3e0063fcb0e72e298276" exitCode=0 Dec 06 15:45:53 crc kubenswrapper[5003]: I1206 15:45:53.053522 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn" event={"ID":"ccb39b5c-9a0b-4ce9-a83c-a41fda667b92","Type":"ContainerDied","Data":"614d30d0e2815bfea9409903fbf4562b8d3a51c8e11f3e0063fcb0e72e298276"} Dec 06 15:45:53 crc kubenswrapper[5003]: I1206 15:45:53.055798 5003 generic.go:334] "Generic (PLEG): container finished" podID="d29ab405-2a8c-4783-95f1-f48f4bd04bf2" containerID="0ee2e5ff722dcc09c6bd7cb9566754b3b798c02ec03990dab193828bcfd5e363" exitCode=0 Dec 06 15:45:53 crc kubenswrapper[5003]: I1206 15:45:53.055834 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d2lpv" event={"ID":"d29ab405-2a8c-4783-95f1-f48f4bd04bf2","Type":"ContainerDied","Data":"0ee2e5ff722dcc09c6bd7cb9566754b3b798c02ec03990dab193828bcfd5e363"} Dec 06 15:45:53 crc kubenswrapper[5003]: I1206 15:45:53.055870 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d2lpv" event={"ID":"d29ab405-2a8c-4783-95f1-f48f4bd04bf2","Type":"ContainerStarted","Data":"ffcb4244fef9d7d32c32b4e9bebc5bc0309d0aea93306b93c110e2fda846a240"} Dec 06 15:45:54 crc kubenswrapper[5003]: I1206 15:45:54.062684 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d2lpv" event={"ID":"d29ab405-2a8c-4783-95f1-f48f4bd04bf2","Type":"ContainerStarted","Data":"a04e5430c343a3ce57e9d08fa9a2136a6cd444191f7ad08377dd2ea8180043b2"} Dec 06 15:45:54 crc kubenswrapper[5003]: I1206 15:45:54.065399 5003 generic.go:334] "Generic (PLEG): container finished" podID="ccb39b5c-9a0b-4ce9-a83c-a41fda667b92" containerID="d6043b8278607d1707fde50480ef2b02ab22305e51f3c84bf014654bd37105a3" exitCode=0 Dec 06 15:45:54 crc kubenswrapper[5003]: I1206 15:45:54.065440 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn" event={"ID":"ccb39b5c-9a0b-4ce9-a83c-a41fda667b92","Type":"ContainerDied","Data":"d6043b8278607d1707fde50480ef2b02ab22305e51f3c84bf014654bd37105a3"} Dec 06 15:45:54 crc kubenswrapper[5003]: I1206 15:45:54.150304 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-cdmjt" Dec 06 15:45:55 crc kubenswrapper[5003]: I1206 15:45:55.072236 5003 generic.go:334] "Generic (PLEG): container finished" podID="d29ab405-2a8c-4783-95f1-f48f4bd04bf2" containerID="a04e5430c343a3ce57e9d08fa9a2136a6cd444191f7ad08377dd2ea8180043b2" exitCode=0 Dec 06 15:45:55 crc kubenswrapper[5003]: I1206 15:45:55.072331 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d2lpv" event={"ID":"d29ab405-2a8c-4783-95f1-f48f4bd04bf2","Type":"ContainerDied","Data":"a04e5430c343a3ce57e9d08fa9a2136a6cd444191f7ad08377dd2ea8180043b2"} Dec 06 15:45:55 crc kubenswrapper[5003]: I1206 15:45:55.291599 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn" Dec 06 15:45:55 crc kubenswrapper[5003]: I1206 15:45:55.357628 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9z74j\" (UniqueName: \"kubernetes.io/projected/ccb39b5c-9a0b-4ce9-a83c-a41fda667b92-kube-api-access-9z74j\") pod \"ccb39b5c-9a0b-4ce9-a83c-a41fda667b92\" (UID: \"ccb39b5c-9a0b-4ce9-a83c-a41fda667b92\") " Dec 06 15:45:55 crc kubenswrapper[5003]: I1206 15:45:55.357702 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ccb39b5c-9a0b-4ce9-a83c-a41fda667b92-util\") pod \"ccb39b5c-9a0b-4ce9-a83c-a41fda667b92\" (UID: \"ccb39b5c-9a0b-4ce9-a83c-a41fda667b92\") " Dec 06 15:45:55 crc kubenswrapper[5003]: I1206 15:45:55.357816 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ccb39b5c-9a0b-4ce9-a83c-a41fda667b92-bundle\") pod \"ccb39b5c-9a0b-4ce9-a83c-a41fda667b92\" (UID: \"ccb39b5c-9a0b-4ce9-a83c-a41fda667b92\") " Dec 06 15:45:55 crc kubenswrapper[5003]: I1206 15:45:55.358871 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ccb39b5c-9a0b-4ce9-a83c-a41fda667b92-bundle" (OuterVolumeSpecName: "bundle") pod "ccb39b5c-9a0b-4ce9-a83c-a41fda667b92" (UID: "ccb39b5c-9a0b-4ce9-a83c-a41fda667b92"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:45:55 crc kubenswrapper[5003]: I1206 15:45:55.365273 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ccb39b5c-9a0b-4ce9-a83c-a41fda667b92-kube-api-access-9z74j" (OuterVolumeSpecName: "kube-api-access-9z74j") pod "ccb39b5c-9a0b-4ce9-a83c-a41fda667b92" (UID: "ccb39b5c-9a0b-4ce9-a83c-a41fda667b92"). InnerVolumeSpecName "kube-api-access-9z74j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:45:55 crc kubenswrapper[5003]: I1206 15:45:55.373179 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ccb39b5c-9a0b-4ce9-a83c-a41fda667b92-util" (OuterVolumeSpecName: "util") pod "ccb39b5c-9a0b-4ce9-a83c-a41fda667b92" (UID: "ccb39b5c-9a0b-4ce9-a83c-a41fda667b92"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:45:55 crc kubenswrapper[5003]: I1206 15:45:55.459295 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9z74j\" (UniqueName: \"kubernetes.io/projected/ccb39b5c-9a0b-4ce9-a83c-a41fda667b92-kube-api-access-9z74j\") on node \"crc\" DevicePath \"\"" Dec 06 15:45:55 crc kubenswrapper[5003]: I1206 15:45:55.459349 5003 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ccb39b5c-9a0b-4ce9-a83c-a41fda667b92-util\") on node \"crc\" DevicePath \"\"" Dec 06 15:45:55 crc kubenswrapper[5003]: I1206 15:45:55.459368 5003 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ccb39b5c-9a0b-4ce9-a83c-a41fda667b92-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 15:45:56 crc kubenswrapper[5003]: I1206 15:45:56.081777 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d2lpv" event={"ID":"d29ab405-2a8c-4783-95f1-f48f4bd04bf2","Type":"ContainerStarted","Data":"4928f074d1ce03c68189d2bf5f80c197902db629d6a0fb5794e1db1a652b0124"} Dec 06 15:45:56 crc kubenswrapper[5003]: I1206 15:45:56.084318 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn" event={"ID":"ccb39b5c-9a0b-4ce9-a83c-a41fda667b92","Type":"ContainerDied","Data":"d47b5550bda04d6c7efc864561de6262a1db0df93f4d81e9c689a71db194cae9"} Dec 06 15:45:56 crc kubenswrapper[5003]: I1206 15:45:56.084446 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d47b5550bda04d6c7efc864561de6262a1db0df93f4d81e9c689a71db194cae9" Dec 06 15:45:56 crc kubenswrapper[5003]: I1206 15:45:56.084394 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn" Dec 06 15:45:56 crc kubenswrapper[5003]: I1206 15:45:56.105884 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-d2lpv" podStartSLOduration=1.594458676 podStartE2EDuration="4.105859943s" podCreationTimestamp="2025-12-06 15:45:52 +0000 UTC" firstStartedPulling="2025-12-06 15:45:53.056818143 +0000 UTC m=+831.590172524" lastFinishedPulling="2025-12-06 15:45:55.5682194 +0000 UTC m=+834.101573791" observedRunningTime="2025-12-06 15:45:56.104711462 +0000 UTC m=+834.638065873" watchObservedRunningTime="2025-12-06 15:45:56.105859943 +0000 UTC m=+834.639214334" Dec 06 15:46:02 crc kubenswrapper[5003]: I1206 15:46:02.066942 5003 scope.go:117] "RemoveContainer" containerID="22d16feb3425c5cac7562c4468723b0aae567d2d31db5516b3b0ce7d38d91c6b" Dec 06 15:46:02 crc kubenswrapper[5003]: I1206 15:46:02.559525 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-d2lpv" Dec 06 15:46:02 crc kubenswrapper[5003]: I1206 15:46:02.559575 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-d2lpv" Dec 06 15:46:02 crc kubenswrapper[5003]: I1206 15:46:02.608242 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-d2lpv" Dec 06 15:46:03 crc kubenswrapper[5003]: I1206 15:46:03.206316 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-d2lpv" Dec 06 15:46:04 crc kubenswrapper[5003]: I1206 15:46:04.132022 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-9kdpn_350e8b9a-b7bf-4dc9-abe9-d10f7a088be3/kube-multus/2.log" Dec 06 15:46:04 crc kubenswrapper[5003]: I1206 15:46:04.796314 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-d2lpv"] Dec 06 15:46:05 crc kubenswrapper[5003]: I1206 15:46:05.136630 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-d2lpv" podUID="d29ab405-2a8c-4783-95f1-f48f4bd04bf2" containerName="registry-server" containerID="cri-o://4928f074d1ce03c68189d2bf5f80c197902db629d6a0fb5794e1db1a652b0124" gracePeriod=2 Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.149268 5003 generic.go:334] "Generic (PLEG): container finished" podID="d29ab405-2a8c-4783-95f1-f48f4bd04bf2" containerID="4928f074d1ce03c68189d2bf5f80c197902db629d6a0fb5794e1db1a652b0124" exitCode=0 Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.149306 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d2lpv" event={"ID":"d29ab405-2a8c-4783-95f1-f48f4bd04bf2","Type":"ContainerDied","Data":"4928f074d1ce03c68189d2bf5f80c197902db629d6a0fb5794e1db1a652b0124"} Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.430593 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-d2lpv" Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.547838 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qb9wb\" (UniqueName: \"kubernetes.io/projected/d29ab405-2a8c-4783-95f1-f48f4bd04bf2-kube-api-access-qb9wb\") pod \"d29ab405-2a8c-4783-95f1-f48f4bd04bf2\" (UID: \"d29ab405-2a8c-4783-95f1-f48f4bd04bf2\") " Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.547922 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d29ab405-2a8c-4783-95f1-f48f4bd04bf2-catalog-content\") pod \"d29ab405-2a8c-4783-95f1-f48f4bd04bf2\" (UID: \"d29ab405-2a8c-4783-95f1-f48f4bd04bf2\") " Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.547958 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d29ab405-2a8c-4783-95f1-f48f4bd04bf2-utilities\") pod \"d29ab405-2a8c-4783-95f1-f48f4bd04bf2\" (UID: \"d29ab405-2a8c-4783-95f1-f48f4bd04bf2\") " Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.548952 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d29ab405-2a8c-4783-95f1-f48f4bd04bf2-utilities" (OuterVolumeSpecName: "utilities") pod "d29ab405-2a8c-4783-95f1-f48f4bd04bf2" (UID: "d29ab405-2a8c-4783-95f1-f48f4bd04bf2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.555274 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d29ab405-2a8c-4783-95f1-f48f4bd04bf2-kube-api-access-qb9wb" (OuterVolumeSpecName: "kube-api-access-qb9wb") pod "d29ab405-2a8c-4783-95f1-f48f4bd04bf2" (UID: "d29ab405-2a8c-4783-95f1-f48f4bd04bf2"). InnerVolumeSpecName "kube-api-access-qb9wb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.649985 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qb9wb\" (UniqueName: \"kubernetes.io/projected/d29ab405-2a8c-4783-95f1-f48f4bd04bf2-kube-api-access-qb9wb\") on node \"crc\" DevicePath \"\"" Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.650035 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d29ab405-2a8c-4783-95f1-f48f4bd04bf2-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.671096 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d29ab405-2a8c-4783-95f1-f48f4bd04bf2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d29ab405-2a8c-4783-95f1-f48f4bd04bf2" (UID: "d29ab405-2a8c-4783-95f1-f48f4bd04bf2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.735604 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-6fb7b5787c-jtd2n"] Dec 06 15:46:07 crc kubenswrapper[5003]: E1206 15:46:07.735797 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d29ab405-2a8c-4783-95f1-f48f4bd04bf2" containerName="registry-server" Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.735810 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d29ab405-2a8c-4783-95f1-f48f4bd04bf2" containerName="registry-server" Dec 06 15:46:07 crc kubenswrapper[5003]: E1206 15:46:07.735823 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccb39b5c-9a0b-4ce9-a83c-a41fda667b92" containerName="pull" Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.735829 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccb39b5c-9a0b-4ce9-a83c-a41fda667b92" containerName="pull" Dec 06 15:46:07 crc kubenswrapper[5003]: E1206 15:46:07.735842 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d29ab405-2a8c-4783-95f1-f48f4bd04bf2" containerName="extract-utilities" Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.735847 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d29ab405-2a8c-4783-95f1-f48f4bd04bf2" containerName="extract-utilities" Dec 06 15:46:07 crc kubenswrapper[5003]: E1206 15:46:07.735855 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d29ab405-2a8c-4783-95f1-f48f4bd04bf2" containerName="extract-content" Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.735861 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d29ab405-2a8c-4783-95f1-f48f4bd04bf2" containerName="extract-content" Dec 06 15:46:07 crc kubenswrapper[5003]: E1206 15:46:07.735870 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccb39b5c-9a0b-4ce9-a83c-a41fda667b92" containerName="extract" Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.735876 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccb39b5c-9a0b-4ce9-a83c-a41fda667b92" containerName="extract" Dec 06 15:46:07 crc kubenswrapper[5003]: E1206 15:46:07.735884 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccb39b5c-9a0b-4ce9-a83c-a41fda667b92" containerName="util" Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.735889 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccb39b5c-9a0b-4ce9-a83c-a41fda667b92" containerName="util" Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.735973 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="d29ab405-2a8c-4783-95f1-f48f4bd04bf2" containerName="registry-server" Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.735987 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccb39b5c-9a0b-4ce9-a83c-a41fda667b92" containerName="extract" Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.736330 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-6fb7b5787c-jtd2n" Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.739068 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-ss8gt" Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.749761 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.750043 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.750698 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d29ab405-2a8c-4783-95f1-f48f4bd04bf2-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.750738 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.750738 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.766418 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-6fb7b5787c-jtd2n"] Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.852237 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e1a24ae6-f251-42bc-bb3e-1d9bd03dd13e-webhook-cert\") pod \"metallb-operator-controller-manager-6fb7b5787c-jtd2n\" (UID: \"e1a24ae6-f251-42bc-bb3e-1d9bd03dd13e\") " pod="metallb-system/metallb-operator-controller-manager-6fb7b5787c-jtd2n" Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.852306 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtcjm\" (UniqueName: \"kubernetes.io/projected/e1a24ae6-f251-42bc-bb3e-1d9bd03dd13e-kube-api-access-jtcjm\") pod \"metallb-operator-controller-manager-6fb7b5787c-jtd2n\" (UID: \"e1a24ae6-f251-42bc-bb3e-1d9bd03dd13e\") " pod="metallb-system/metallb-operator-controller-manager-6fb7b5787c-jtd2n" Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.852594 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e1a24ae6-f251-42bc-bb3e-1d9bd03dd13e-apiservice-cert\") pod \"metallb-operator-controller-manager-6fb7b5787c-jtd2n\" (UID: \"e1a24ae6-f251-42bc-bb3e-1d9bd03dd13e\") " pod="metallb-system/metallb-operator-controller-manager-6fb7b5787c-jtd2n" Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.953839 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e1a24ae6-f251-42bc-bb3e-1d9bd03dd13e-apiservice-cert\") pod \"metallb-operator-controller-manager-6fb7b5787c-jtd2n\" (UID: \"e1a24ae6-f251-42bc-bb3e-1d9bd03dd13e\") " pod="metallb-system/metallb-operator-controller-manager-6fb7b5787c-jtd2n" Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.953908 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e1a24ae6-f251-42bc-bb3e-1d9bd03dd13e-webhook-cert\") 
pod \"metallb-operator-controller-manager-6fb7b5787c-jtd2n\" (UID: \"e1a24ae6-f251-42bc-bb3e-1d9bd03dd13e\") " pod="metallb-system/metallb-operator-controller-manager-6fb7b5787c-jtd2n" Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.953945 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtcjm\" (UniqueName: \"kubernetes.io/projected/e1a24ae6-f251-42bc-bb3e-1d9bd03dd13e-kube-api-access-jtcjm\") pod \"metallb-operator-controller-manager-6fb7b5787c-jtd2n\" (UID: \"e1a24ae6-f251-42bc-bb3e-1d9bd03dd13e\") " pod="metallb-system/metallb-operator-controller-manager-6fb7b5787c-jtd2n" Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.958095 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e1a24ae6-f251-42bc-bb3e-1d9bd03dd13e-apiservice-cert\") pod \"metallb-operator-controller-manager-6fb7b5787c-jtd2n\" (UID: \"e1a24ae6-f251-42bc-bb3e-1d9bd03dd13e\") " pod="metallb-system/metallb-operator-controller-manager-6fb7b5787c-jtd2n" Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.958095 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e1a24ae6-f251-42bc-bb3e-1d9bd03dd13e-webhook-cert\") pod \"metallb-operator-controller-manager-6fb7b5787c-jtd2n\" (UID: \"e1a24ae6-f251-42bc-bb3e-1d9bd03dd13e\") " pod="metallb-system/metallb-operator-controller-manager-6fb7b5787c-jtd2n" Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.984167 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-576499f99b-6pz7r"] Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.984969 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-576499f99b-6pz7r" Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.987965 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-258kb" Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.987968 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.988043 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 06 15:46:07 crc kubenswrapper[5003]: I1206 15:46:07.993392 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtcjm\" (UniqueName: \"kubernetes.io/projected/e1a24ae6-f251-42bc-bb3e-1d9bd03dd13e-kube-api-access-jtcjm\") pod \"metallb-operator-controller-manager-6fb7b5787c-jtd2n\" (UID: \"e1a24ae6-f251-42bc-bb3e-1d9bd03dd13e\") " pod="metallb-system/metallb-operator-controller-manager-6fb7b5787c-jtd2n" Dec 06 15:46:08 crc kubenswrapper[5003]: I1206 15:46:08.002935 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-576499f99b-6pz7r"] Dec 06 15:46:08 crc kubenswrapper[5003]: I1206 15:46:08.049360 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-6fb7b5787c-jtd2n" Dec 06 15:46:08 crc kubenswrapper[5003]: I1206 15:46:08.155802 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/49994fd5-c0aa-446a-b546-d3e0acc4fa81-webhook-cert\") pod \"metallb-operator-webhook-server-576499f99b-6pz7r\" (UID: \"49994fd5-c0aa-446a-b546-d3e0acc4fa81\") " pod="metallb-system/metallb-operator-webhook-server-576499f99b-6pz7r" Dec 06 15:46:08 crc kubenswrapper[5003]: I1206 15:46:08.156194 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lpkcq\" (UniqueName: \"kubernetes.io/projected/49994fd5-c0aa-446a-b546-d3e0acc4fa81-kube-api-access-lpkcq\") pod \"metallb-operator-webhook-server-576499f99b-6pz7r\" (UID: \"49994fd5-c0aa-446a-b546-d3e0acc4fa81\") " pod="metallb-system/metallb-operator-webhook-server-576499f99b-6pz7r" Dec 06 15:46:08 crc kubenswrapper[5003]: I1206 15:46:08.156253 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/49994fd5-c0aa-446a-b546-d3e0acc4fa81-apiservice-cert\") pod \"metallb-operator-webhook-server-576499f99b-6pz7r\" (UID: \"49994fd5-c0aa-446a-b546-d3e0acc4fa81\") " pod="metallb-system/metallb-operator-webhook-server-576499f99b-6pz7r" Dec 06 15:46:08 crc kubenswrapper[5003]: I1206 15:46:08.158989 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-d2lpv" Dec 06 15:46:08 crc kubenswrapper[5003]: I1206 15:46:08.163952 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d2lpv" event={"ID":"d29ab405-2a8c-4783-95f1-f48f4bd04bf2","Type":"ContainerDied","Data":"ffcb4244fef9d7d32c32b4e9bebc5bc0309d0aea93306b93c110e2fda846a240"} Dec 06 15:46:08 crc kubenswrapper[5003]: I1206 15:46:08.164081 5003 scope.go:117] "RemoveContainer" containerID="4928f074d1ce03c68189d2bf5f80c197902db629d6a0fb5794e1db1a652b0124" Dec 06 15:46:08 crc kubenswrapper[5003]: I1206 15:46:08.182837 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-d2lpv"] Dec 06 15:46:08 crc kubenswrapper[5003]: I1206 15:46:08.191948 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-d2lpv"] Dec 06 15:46:08 crc kubenswrapper[5003]: I1206 15:46:08.200611 5003 scope.go:117] "RemoveContainer" containerID="a04e5430c343a3ce57e9d08fa9a2136a6cd444191f7ad08377dd2ea8180043b2" Dec 06 15:46:08 crc kubenswrapper[5003]: I1206 15:46:08.232516 5003 scope.go:117] "RemoveContainer" containerID="0ee2e5ff722dcc09c6bd7cb9566754b3b798c02ec03990dab193828bcfd5e363" Dec 06 15:46:08 crc kubenswrapper[5003]: I1206 15:46:08.257775 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/49994fd5-c0aa-446a-b546-d3e0acc4fa81-webhook-cert\") pod \"metallb-operator-webhook-server-576499f99b-6pz7r\" (UID: \"49994fd5-c0aa-446a-b546-d3e0acc4fa81\") " pod="metallb-system/metallb-operator-webhook-server-576499f99b-6pz7r" Dec 06 15:46:08 crc kubenswrapper[5003]: I1206 15:46:08.257842 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lpkcq\" (UniqueName: \"kubernetes.io/projected/49994fd5-c0aa-446a-b546-d3e0acc4fa81-kube-api-access-lpkcq\") 
pod \"metallb-operator-webhook-server-576499f99b-6pz7r\" (UID: \"49994fd5-c0aa-446a-b546-d3e0acc4fa81\") " pod="metallb-system/metallb-operator-webhook-server-576499f99b-6pz7r" Dec 06 15:46:08 crc kubenswrapper[5003]: I1206 15:46:08.257950 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/49994fd5-c0aa-446a-b546-d3e0acc4fa81-apiservice-cert\") pod \"metallb-operator-webhook-server-576499f99b-6pz7r\" (UID: \"49994fd5-c0aa-446a-b546-d3e0acc4fa81\") " pod="metallb-system/metallb-operator-webhook-server-576499f99b-6pz7r" Dec 06 15:46:08 crc kubenswrapper[5003]: I1206 15:46:08.262152 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/49994fd5-c0aa-446a-b546-d3e0acc4fa81-webhook-cert\") pod \"metallb-operator-webhook-server-576499f99b-6pz7r\" (UID: \"49994fd5-c0aa-446a-b546-d3e0acc4fa81\") " pod="metallb-system/metallb-operator-webhook-server-576499f99b-6pz7r" Dec 06 15:46:08 crc kubenswrapper[5003]: I1206 15:46:08.264986 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/49994fd5-c0aa-446a-b546-d3e0acc4fa81-apiservice-cert\") pod \"metallb-operator-webhook-server-576499f99b-6pz7r\" (UID: \"49994fd5-c0aa-446a-b546-d3e0acc4fa81\") " pod="metallb-system/metallb-operator-webhook-server-576499f99b-6pz7r" Dec 06 15:46:08 crc kubenswrapper[5003]: I1206 15:46:08.271077 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-6fb7b5787c-jtd2n"] Dec 06 15:46:08 crc kubenswrapper[5003]: I1206 15:46:08.278155 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lpkcq\" (UniqueName: \"kubernetes.io/projected/49994fd5-c0aa-446a-b546-d3e0acc4fa81-kube-api-access-lpkcq\") pod \"metallb-operator-webhook-server-576499f99b-6pz7r\" (UID: \"49994fd5-c0aa-446a-b546-d3e0acc4fa81\") " pod="metallb-system/metallb-operator-webhook-server-576499f99b-6pz7r" Dec 06 15:46:08 crc kubenswrapper[5003]: W1206 15:46:08.286923 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode1a24ae6_f251_42bc_bb3e_1d9bd03dd13e.slice/crio-d1de3c5422a05bbb9b1edfe03c469abac44402279bb1fda14599908af94a4f01 WatchSource:0}: Error finding container d1de3c5422a05bbb9b1edfe03c469abac44402279bb1fda14599908af94a4f01: Status 404 returned error can't find the container with id d1de3c5422a05bbb9b1edfe03c469abac44402279bb1fda14599908af94a4f01 Dec 06 15:46:08 crc kubenswrapper[5003]: I1206 15:46:08.329855 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-576499f99b-6pz7r" Dec 06 15:46:08 crc kubenswrapper[5003]: I1206 15:46:08.565801 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-576499f99b-6pz7r"] Dec 06 15:46:09 crc kubenswrapper[5003]: I1206 15:46:09.167141 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-6fb7b5787c-jtd2n" event={"ID":"e1a24ae6-f251-42bc-bb3e-1d9bd03dd13e","Type":"ContainerStarted","Data":"d1de3c5422a05bbb9b1edfe03c469abac44402279bb1fda14599908af94a4f01"} Dec 06 15:46:09 crc kubenswrapper[5003]: I1206 15:46:09.168443 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-576499f99b-6pz7r" event={"ID":"49994fd5-c0aa-446a-b546-d3e0acc4fa81","Type":"ContainerStarted","Data":"f19834a14754bdf1dc80810cf4f1cfc82fcb3a6089737be89e85920e762230d0"} Dec 06 15:46:09 crc kubenswrapper[5003]: I1206 15:46:09.723190 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d29ab405-2a8c-4783-95f1-f48f4bd04bf2" path="/var/lib/kubelet/pods/d29ab405-2a8c-4783-95f1-f48f4bd04bf2/volumes" Dec 06 15:46:15 crc kubenswrapper[5003]: I1206 15:46:15.208841 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-576499f99b-6pz7r" event={"ID":"49994fd5-c0aa-446a-b546-d3e0acc4fa81","Type":"ContainerStarted","Data":"d4d66bd57ff1c25d4420444d07a4c6ce06e555f6e949766d653b4a431a1a97f2"} Dec 06 15:46:15 crc kubenswrapper[5003]: I1206 15:46:15.209475 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-576499f99b-6pz7r" Dec 06 15:46:15 crc kubenswrapper[5003]: I1206 15:46:15.244828 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-576499f99b-6pz7r" podStartSLOduration=1.801239932 podStartE2EDuration="8.244802954s" podCreationTimestamp="2025-12-06 15:46:07 +0000 UTC" firstStartedPulling="2025-12-06 15:46:08.578180907 +0000 UTC m=+847.111535288" lastFinishedPulling="2025-12-06 15:46:15.021743929 +0000 UTC m=+853.555098310" observedRunningTime="2025-12-06 15:46:15.238194602 +0000 UTC m=+853.771548993" watchObservedRunningTime="2025-12-06 15:46:15.244802954 +0000 UTC m=+853.778157335" Dec 06 15:46:17 crc kubenswrapper[5003]: I1206 15:46:17.226830 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-6fb7b5787c-jtd2n" event={"ID":"e1a24ae6-f251-42bc-bb3e-1d9bd03dd13e","Type":"ContainerStarted","Data":"4100782c8ea23538fc6c41d052f55c584ba986d92f8a68fb287673a0c23de1ac"} Dec 06 15:46:17 crc kubenswrapper[5003]: I1206 15:46:17.227206 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-6fb7b5787c-jtd2n" Dec 06 15:46:17 crc kubenswrapper[5003]: I1206 15:46:17.250146 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-6fb7b5787c-jtd2n" podStartSLOduration=2.003829044 podStartE2EDuration="10.250122371s" podCreationTimestamp="2025-12-06 15:46:07 +0000 UTC" firstStartedPulling="2025-12-06 15:46:08.289150707 +0000 UTC m=+846.822505088" lastFinishedPulling="2025-12-06 15:46:16.535444044 +0000 UTC m=+855.068798415" observedRunningTime="2025-12-06 15:46:17.246517062 +0000 UTC m=+855.779871483" 
watchObservedRunningTime="2025-12-06 15:46:17.250122371 +0000 UTC m=+855.783476762" Dec 06 15:46:28 crc kubenswrapper[5003]: I1206 15:46:28.341767 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-576499f99b-6pz7r" Dec 06 15:46:48 crc kubenswrapper[5003]: I1206 15:46:48.052647 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-6fb7b5787c-jtd2n" Dec 06 15:46:48 crc kubenswrapper[5003]: I1206 15:46:48.916872 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-2jzmn"] Dec 06 15:46:48 crc kubenswrapper[5003]: I1206 15:46:48.917571 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-2jzmn" Dec 06 15:46:48 crc kubenswrapper[5003]: I1206 15:46:48.919283 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 06 15:46:48 crc kubenswrapper[5003]: I1206 15:46:48.919606 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-gj9dx" Dec 06 15:46:48 crc kubenswrapper[5003]: I1206 15:46:48.943795 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-fs94m"] Dec 06 15:46:48 crc kubenswrapper[5003]: I1206 15:46:48.952514 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-fs94m" Dec 06 15:46:48 crc kubenswrapper[5003]: I1206 15:46:48.964014 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 06 15:46:48 crc kubenswrapper[5003]: I1206 15:46:48.964250 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 06 15:46:48 crc kubenswrapper[5003]: I1206 15:46:48.989118 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-2jzmn"] Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.029343 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-vtb75"] Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.030470 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-vtb75" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.037272 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.037297 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-s8wxj" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.037348 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.038233 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.053197 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-2ddqw"] Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.054305 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-2ddqw" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.056906 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.070214 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-2ddqw"] Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.086276 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhbr8\" (UniqueName: \"kubernetes.io/projected/395612ce-6ba7-4b60-822c-dbae3eea5e7f-kube-api-access-jhbr8\") pod \"frr-k8s-fs94m\" (UID: \"395612ce-6ba7-4b60-822c-dbae3eea5e7f\") " pod="metallb-system/frr-k8s-fs94m" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.086346 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/395612ce-6ba7-4b60-822c-dbae3eea5e7f-metrics\") pod \"frr-k8s-fs94m\" (UID: \"395612ce-6ba7-4b60-822c-dbae3eea5e7f\") " pod="metallb-system/frr-k8s-fs94m" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.086390 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8d4e8c3d-e37b-4489-bd61-84af9e792de1-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-2jzmn\" (UID: \"8d4e8c3d-e37b-4489-bd61-84af9e792de1\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-2jzmn" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.086428 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xf5m\" (UniqueName: \"kubernetes.io/projected/8d4e8c3d-e37b-4489-bd61-84af9e792de1-kube-api-access-4xf5m\") pod \"frr-k8s-webhook-server-7fcb986d4-2jzmn\" (UID: \"8d4e8c3d-e37b-4489-bd61-84af9e792de1\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-2jzmn" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.086507 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/395612ce-6ba7-4b60-822c-dbae3eea5e7f-metrics-certs\") pod \"frr-k8s-fs94m\" (UID: \"395612ce-6ba7-4b60-822c-dbae3eea5e7f\") " pod="metallb-system/frr-k8s-fs94m" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.086533 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/395612ce-6ba7-4b60-822c-dbae3eea5e7f-reloader\") pod \"frr-k8s-fs94m\" (UID: \"395612ce-6ba7-4b60-822c-dbae3eea5e7f\") " pod="metallb-system/frr-k8s-fs94m" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.086573 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/395612ce-6ba7-4b60-822c-dbae3eea5e7f-frr-conf\") pod \"frr-k8s-fs94m\" (UID: \"395612ce-6ba7-4b60-822c-dbae3eea5e7f\") " pod="metallb-system/frr-k8s-fs94m" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.086606 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/395612ce-6ba7-4b60-822c-dbae3eea5e7f-frr-sockets\") pod \"frr-k8s-fs94m\" (UID: \"395612ce-6ba7-4b60-822c-dbae3eea5e7f\") " pod="metallb-system/frr-k8s-fs94m" Dec 06 15:46:49 crc 
kubenswrapper[5003]: I1206 15:46:49.086634 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/395612ce-6ba7-4b60-822c-dbae3eea5e7f-frr-startup\") pod \"frr-k8s-fs94m\" (UID: \"395612ce-6ba7-4b60-822c-dbae3eea5e7f\") " pod="metallb-system/frr-k8s-fs94m" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.187325 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/50405731-e195-43a8-a231-895b2b19b554-metrics-certs\") pod \"speaker-vtb75\" (UID: \"50405731-e195-43a8-a231-895b2b19b554\") " pod="metallb-system/speaker-vtb75" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.187414 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66rxn\" (UniqueName: \"kubernetes.io/projected/949aaefb-e672-4000-8a50-e943723611ff-kube-api-access-66rxn\") pod \"controller-f8648f98b-2ddqw\" (UID: \"949aaefb-e672-4000-8a50-e943723611ff\") " pod="metallb-system/controller-f8648f98b-2ddqw" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.187449 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/949aaefb-e672-4000-8a50-e943723611ff-cert\") pod \"controller-f8648f98b-2ddqw\" (UID: \"949aaefb-e672-4000-8a50-e943723611ff\") " pod="metallb-system/controller-f8648f98b-2ddqw" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.187480 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/395612ce-6ba7-4b60-822c-dbae3eea5e7f-metrics-certs\") pod \"frr-k8s-fs94m\" (UID: \"395612ce-6ba7-4b60-822c-dbae3eea5e7f\") " pod="metallb-system/frr-k8s-fs94m" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.187527 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/395612ce-6ba7-4b60-822c-dbae3eea5e7f-reloader\") pod \"frr-k8s-fs94m\" (UID: \"395612ce-6ba7-4b60-822c-dbae3eea5e7f\") " pod="metallb-system/frr-k8s-fs94m" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.187557 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/395612ce-6ba7-4b60-822c-dbae3eea5e7f-frr-conf\") pod \"frr-k8s-fs94m\" (UID: \"395612ce-6ba7-4b60-822c-dbae3eea5e7f\") " pod="metallb-system/frr-k8s-fs94m" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.187598 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/949aaefb-e672-4000-8a50-e943723611ff-metrics-certs\") pod \"controller-f8648f98b-2ddqw\" (UID: \"949aaefb-e672-4000-8a50-e943723611ff\") " pod="metallb-system/controller-f8648f98b-2ddqw" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.187630 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/395612ce-6ba7-4b60-822c-dbae3eea5e7f-frr-sockets\") pod \"frr-k8s-fs94m\" (UID: \"395612ce-6ba7-4b60-822c-dbae3eea5e7f\") " pod="metallb-system/frr-k8s-fs94m" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.187656 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" 
(UniqueName: \"kubernetes.io/secret/50405731-e195-43a8-a231-895b2b19b554-memberlist\") pod \"speaker-vtb75\" (UID: \"50405731-e195-43a8-a231-895b2b19b554\") " pod="metallb-system/speaker-vtb75" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.187682 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/395612ce-6ba7-4b60-822c-dbae3eea5e7f-frr-startup\") pod \"frr-k8s-fs94m\" (UID: \"395612ce-6ba7-4b60-822c-dbae3eea5e7f\") " pod="metallb-system/frr-k8s-fs94m" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.187707 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhbr8\" (UniqueName: \"kubernetes.io/projected/395612ce-6ba7-4b60-822c-dbae3eea5e7f-kube-api-access-jhbr8\") pod \"frr-k8s-fs94m\" (UID: \"395612ce-6ba7-4b60-822c-dbae3eea5e7f\") " pod="metallb-system/frr-k8s-fs94m" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.187731 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6c9p\" (UniqueName: \"kubernetes.io/projected/50405731-e195-43a8-a231-895b2b19b554-kube-api-access-z6c9p\") pod \"speaker-vtb75\" (UID: \"50405731-e195-43a8-a231-895b2b19b554\") " pod="metallb-system/speaker-vtb75" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.187772 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/395612ce-6ba7-4b60-822c-dbae3eea5e7f-metrics\") pod \"frr-k8s-fs94m\" (UID: \"395612ce-6ba7-4b60-822c-dbae3eea5e7f\") " pod="metallb-system/frr-k8s-fs94m" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.187801 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8d4e8c3d-e37b-4489-bd61-84af9e792de1-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-2jzmn\" (UID: \"8d4e8c3d-e37b-4489-bd61-84af9e792de1\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-2jzmn" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.187834 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/50405731-e195-43a8-a231-895b2b19b554-metallb-excludel2\") pod \"speaker-vtb75\" (UID: \"50405731-e195-43a8-a231-895b2b19b554\") " pod="metallb-system/speaker-vtb75" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.187858 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xf5m\" (UniqueName: \"kubernetes.io/projected/8d4e8c3d-e37b-4489-bd61-84af9e792de1-kube-api-access-4xf5m\") pod \"frr-k8s-webhook-server-7fcb986d4-2jzmn\" (UID: \"8d4e8c3d-e37b-4489-bd61-84af9e792de1\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-2jzmn" Dec 06 15:46:49 crc kubenswrapper[5003]: E1206 15:46:49.188299 5003 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Dec 06 15:46:49 crc kubenswrapper[5003]: E1206 15:46:49.188355 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/395612ce-6ba7-4b60-822c-dbae3eea5e7f-metrics-certs podName:395612ce-6ba7-4b60-822c-dbae3eea5e7f nodeName:}" failed. No retries permitted until 2025-12-06 15:46:49.688336726 +0000 UTC m=+888.221691117 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/395612ce-6ba7-4b60-822c-dbae3eea5e7f-metrics-certs") pod "frr-k8s-fs94m" (UID: "395612ce-6ba7-4b60-822c-dbae3eea5e7f") : secret "frr-k8s-certs-secret" not found Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.189096 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/395612ce-6ba7-4b60-822c-dbae3eea5e7f-metrics\") pod \"frr-k8s-fs94m\" (UID: \"395612ce-6ba7-4b60-822c-dbae3eea5e7f\") " pod="metallb-system/frr-k8s-fs94m" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.189339 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/395612ce-6ba7-4b60-822c-dbae3eea5e7f-frr-startup\") pod \"frr-k8s-fs94m\" (UID: \"395612ce-6ba7-4b60-822c-dbae3eea5e7f\") " pod="metallb-system/frr-k8s-fs94m" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.189580 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/395612ce-6ba7-4b60-822c-dbae3eea5e7f-reloader\") pod \"frr-k8s-fs94m\" (UID: \"395612ce-6ba7-4b60-822c-dbae3eea5e7f\") " pod="metallb-system/frr-k8s-fs94m" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.189778 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/395612ce-6ba7-4b60-822c-dbae3eea5e7f-frr-conf\") pod \"frr-k8s-fs94m\" (UID: \"395612ce-6ba7-4b60-822c-dbae3eea5e7f\") " pod="metallb-system/frr-k8s-fs94m" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.189957 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/395612ce-6ba7-4b60-822c-dbae3eea5e7f-frr-sockets\") pod \"frr-k8s-fs94m\" (UID: \"395612ce-6ba7-4b60-822c-dbae3eea5e7f\") " pod="metallb-system/frr-k8s-fs94m" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.195177 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8d4e8c3d-e37b-4489-bd61-84af9e792de1-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-2jzmn\" (UID: \"8d4e8c3d-e37b-4489-bd61-84af9e792de1\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-2jzmn" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.207853 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhbr8\" (UniqueName: \"kubernetes.io/projected/395612ce-6ba7-4b60-822c-dbae3eea5e7f-kube-api-access-jhbr8\") pod \"frr-k8s-fs94m\" (UID: \"395612ce-6ba7-4b60-822c-dbae3eea5e7f\") " pod="metallb-system/frr-k8s-fs94m" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.220729 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xf5m\" (UniqueName: \"kubernetes.io/projected/8d4e8c3d-e37b-4489-bd61-84af9e792de1-kube-api-access-4xf5m\") pod \"frr-k8s-webhook-server-7fcb986d4-2jzmn\" (UID: \"8d4e8c3d-e37b-4489-bd61-84af9e792de1\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-2jzmn" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.263435 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-2jzmn" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.290254 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/50405731-e195-43a8-a231-895b2b19b554-metallb-excludel2\") pod \"speaker-vtb75\" (UID: \"50405731-e195-43a8-a231-895b2b19b554\") " pod="metallb-system/speaker-vtb75" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.290828 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/50405731-e195-43a8-a231-895b2b19b554-metrics-certs\") pod \"speaker-vtb75\" (UID: \"50405731-e195-43a8-a231-895b2b19b554\") " pod="metallb-system/speaker-vtb75" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.291029 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/50405731-e195-43a8-a231-895b2b19b554-metallb-excludel2\") pod \"speaker-vtb75\" (UID: \"50405731-e195-43a8-a231-895b2b19b554\") " pod="metallb-system/speaker-vtb75" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.291812 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66rxn\" (UniqueName: \"kubernetes.io/projected/949aaefb-e672-4000-8a50-e943723611ff-kube-api-access-66rxn\") pod \"controller-f8648f98b-2ddqw\" (UID: \"949aaefb-e672-4000-8a50-e943723611ff\") " pod="metallb-system/controller-f8648f98b-2ddqw" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.291857 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/949aaefb-e672-4000-8a50-e943723611ff-cert\") pod \"controller-f8648f98b-2ddqw\" (UID: \"949aaefb-e672-4000-8a50-e943723611ff\") " pod="metallb-system/controller-f8648f98b-2ddqw" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.291994 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/949aaefb-e672-4000-8a50-e943723611ff-metrics-certs\") pod \"controller-f8648f98b-2ddqw\" (UID: \"949aaefb-e672-4000-8a50-e943723611ff\") " pod="metallb-system/controller-f8648f98b-2ddqw" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.292048 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/50405731-e195-43a8-a231-895b2b19b554-memberlist\") pod \"speaker-vtb75\" (UID: \"50405731-e195-43a8-a231-895b2b19b554\") " pod="metallb-system/speaker-vtb75" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.292091 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6c9p\" (UniqueName: \"kubernetes.io/projected/50405731-e195-43a8-a231-895b2b19b554-kube-api-access-z6c9p\") pod \"speaker-vtb75\" (UID: \"50405731-e195-43a8-a231-895b2b19b554\") " pod="metallb-system/speaker-vtb75" Dec 06 15:46:49 crc kubenswrapper[5003]: E1206 15:46:49.292271 5003 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 06 15:46:49 crc kubenswrapper[5003]: E1206 15:46:49.292372 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/50405731-e195-43a8-a231-895b2b19b554-memberlist podName:50405731-e195-43a8-a231-895b2b19b554 nodeName:}" failed. 
No retries permitted until 2025-12-06 15:46:49.792347091 +0000 UTC m=+888.325701522 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/50405731-e195-43a8-a231-895b2b19b554-memberlist") pod "speaker-vtb75" (UID: "50405731-e195-43a8-a231-895b2b19b554") : secret "metallb-memberlist" not found Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.294542 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/50405731-e195-43a8-a231-895b2b19b554-metrics-certs\") pod \"speaker-vtb75\" (UID: \"50405731-e195-43a8-a231-895b2b19b554\") " pod="metallb-system/speaker-vtb75" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.296244 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/949aaefb-e672-4000-8a50-e943723611ff-cert\") pod \"controller-f8648f98b-2ddqw\" (UID: \"949aaefb-e672-4000-8a50-e943723611ff\") " pod="metallb-system/controller-f8648f98b-2ddqw" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.296323 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/949aaefb-e672-4000-8a50-e943723611ff-metrics-certs\") pod \"controller-f8648f98b-2ddqw\" (UID: \"949aaefb-e672-4000-8a50-e943723611ff\") " pod="metallb-system/controller-f8648f98b-2ddqw" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.308188 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6c9p\" (UniqueName: \"kubernetes.io/projected/50405731-e195-43a8-a231-895b2b19b554-kube-api-access-z6c9p\") pod \"speaker-vtb75\" (UID: \"50405731-e195-43a8-a231-895b2b19b554\") " pod="metallb-system/speaker-vtb75" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.308326 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66rxn\" (UniqueName: \"kubernetes.io/projected/949aaefb-e672-4000-8a50-e943723611ff-kube-api-access-66rxn\") pod \"controller-f8648f98b-2ddqw\" (UID: \"949aaefb-e672-4000-8a50-e943723611ff\") " pod="metallb-system/controller-f8648f98b-2ddqw" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.375917 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-2ddqw" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.712879 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/395612ce-6ba7-4b60-822c-dbae3eea5e7f-metrics-certs\") pod \"frr-k8s-fs94m\" (UID: \"395612ce-6ba7-4b60-822c-dbae3eea5e7f\") " pod="metallb-system/frr-k8s-fs94m" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.717602 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/395612ce-6ba7-4b60-822c-dbae3eea5e7f-metrics-certs\") pod \"frr-k8s-fs94m\" (UID: \"395612ce-6ba7-4b60-822c-dbae3eea5e7f\") " pod="metallb-system/frr-k8s-fs94m" Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.752454 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-2jzmn"] Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.815094 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/50405731-e195-43a8-a231-895b2b19b554-memberlist\") pod \"speaker-vtb75\" (UID: \"50405731-e195-43a8-a231-895b2b19b554\") " pod="metallb-system/speaker-vtb75" Dec 06 15:46:49 crc kubenswrapper[5003]: E1206 15:46:49.815342 5003 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 06 15:46:49 crc kubenswrapper[5003]: E1206 15:46:49.815402 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/50405731-e195-43a8-a231-895b2b19b554-memberlist podName:50405731-e195-43a8-a231-895b2b19b554 nodeName:}" failed. No retries permitted until 2025-12-06 15:46:50.815382501 +0000 UTC m=+889.348736882 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/50405731-e195-43a8-a231-895b2b19b554-memberlist") pod "speaker-vtb75" (UID: "50405731-e195-43a8-a231-895b2b19b554") : secret "metallb-memberlist" not found Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.818864 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-2ddqw"] Dec 06 15:46:49 crc kubenswrapper[5003]: W1206 15:46:49.823784 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod949aaefb_e672_4000_8a50_e943723611ff.slice/crio-1b4d48704a1b6627ebdcdd27081732a89f5e1fc7b3582ff59a9c43a07f5ba6a7 WatchSource:0}: Error finding container 1b4d48704a1b6627ebdcdd27081732a89f5e1fc7b3582ff59a9c43a07f5ba6a7: Status 404 returned error can't find the container with id 1b4d48704a1b6627ebdcdd27081732a89f5e1fc7b3582ff59a9c43a07f5ba6a7 Dec 06 15:46:49 crc kubenswrapper[5003]: I1206 15:46:49.880341 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-fs94m" Dec 06 15:46:50 crc kubenswrapper[5003]: I1206 15:46:50.618936 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fs94m" event={"ID":"395612ce-6ba7-4b60-822c-dbae3eea5e7f","Type":"ContainerStarted","Data":"d176e933e56a75c561d92ccda94f17e0eff6e958065b7e6c80161072d74ba680"} Dec 06 15:46:50 crc kubenswrapper[5003]: I1206 15:46:50.620556 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-2ddqw" event={"ID":"949aaefb-e672-4000-8a50-e943723611ff","Type":"ContainerStarted","Data":"f6dfeaf7569b632dc727de403bc487ad6b3a1de7d1c561faa6435f3410898e4e"} Dec 06 15:46:50 crc kubenswrapper[5003]: I1206 15:46:50.620604 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-2ddqw" event={"ID":"949aaefb-e672-4000-8a50-e943723611ff","Type":"ContainerStarted","Data":"1b4d48704a1b6627ebdcdd27081732a89f5e1fc7b3582ff59a9c43a07f5ba6a7"} Dec 06 15:46:50 crc kubenswrapper[5003]: I1206 15:46:50.621846 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-2jzmn" event={"ID":"8d4e8c3d-e37b-4489-bd61-84af9e792de1","Type":"ContainerStarted","Data":"b7060a0dcf3470ecf56628d0adf11d3d345206ae7c1fc3cc0b729a131ca36cb2"} Dec 06 15:46:50 crc kubenswrapper[5003]: I1206 15:46:50.829392 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/50405731-e195-43a8-a231-895b2b19b554-memberlist\") pod \"speaker-vtb75\" (UID: \"50405731-e195-43a8-a231-895b2b19b554\") " pod="metallb-system/speaker-vtb75" Dec 06 15:46:50 crc kubenswrapper[5003]: I1206 15:46:50.835333 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/50405731-e195-43a8-a231-895b2b19b554-memberlist\") pod \"speaker-vtb75\" (UID: \"50405731-e195-43a8-a231-895b2b19b554\") " pod="metallb-system/speaker-vtb75" Dec 06 15:46:50 crc kubenswrapper[5003]: I1206 15:46:50.847796 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-vtb75" Dec 06 15:46:50 crc kubenswrapper[5003]: W1206 15:46:50.873962 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod50405731_e195_43a8_a231_895b2b19b554.slice/crio-6ccdde176b899516fcf03cbe4b9ce2056e2a46f8f28c0ca242bb5d878d269ee0 WatchSource:0}: Error finding container 6ccdde176b899516fcf03cbe4b9ce2056e2a46f8f28c0ca242bb5d878d269ee0: Status 404 returned error can't find the container with id 6ccdde176b899516fcf03cbe4b9ce2056e2a46f8f28c0ca242bb5d878d269ee0 Dec 06 15:46:51 crc kubenswrapper[5003]: I1206 15:46:51.629681 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-vtb75" event={"ID":"50405731-e195-43a8-a231-895b2b19b554","Type":"ContainerStarted","Data":"dadfae19c4be37772add2ae636a5afbaefbaab9b9904994e8eadc1682362b529"} Dec 06 15:46:51 crc kubenswrapper[5003]: I1206 15:46:51.629750 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-vtb75" event={"ID":"50405731-e195-43a8-a231-895b2b19b554","Type":"ContainerStarted","Data":"6ccdde176b899516fcf03cbe4b9ce2056e2a46f8f28c0ca242bb5d878d269ee0"} Dec 06 15:46:56 crc kubenswrapper[5003]: I1206 15:46:56.731983 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-2ddqw" event={"ID":"949aaefb-e672-4000-8a50-e943723611ff","Type":"ContainerStarted","Data":"89f629797c510f752d883c36dc954f06b8e62b0427f3b3e833a674dba94eec57"} Dec 06 15:46:56 crc kubenswrapper[5003]: I1206 15:46:56.732620 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-2ddqw" Dec 06 15:46:56 crc kubenswrapper[5003]: I1206 15:46:56.738092 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-vtb75" event={"ID":"50405731-e195-43a8-a231-895b2b19b554","Type":"ContainerStarted","Data":"d81a03ef6778f2d53c40bbd937bd9f65f196732312ffdb1037cc3911c70eadac"} Dec 06 15:46:56 crc kubenswrapper[5003]: I1206 15:46:56.738268 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-vtb75" Dec 06 15:46:56 crc kubenswrapper[5003]: I1206 15:46:56.772723 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-vtb75" podStartSLOduration=3.107774403 podStartE2EDuration="7.772704923s" podCreationTimestamp="2025-12-06 15:46:49 +0000 UTC" firstStartedPulling="2025-12-06 15:46:51.093807892 +0000 UTC m=+889.627162273" lastFinishedPulling="2025-12-06 15:46:55.758738412 +0000 UTC m=+894.292092793" observedRunningTime="2025-12-06 15:46:56.77257249 +0000 UTC m=+895.305926881" watchObservedRunningTime="2025-12-06 15:46:56.772704923 +0000 UTC m=+895.306059304" Dec 06 15:46:56 crc kubenswrapper[5003]: I1206 15:46:56.775008 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-2ddqw" podStartSLOduration=2.491585968 podStartE2EDuration="7.774999697s" podCreationTimestamp="2025-12-06 15:46:49 +0000 UTC" firstStartedPulling="2025-12-06 15:46:50.449570672 +0000 UTC m=+888.982925053" lastFinishedPulling="2025-12-06 15:46:55.732984401 +0000 UTC m=+894.266338782" observedRunningTime="2025-12-06 15:46:56.752344721 +0000 UTC m=+895.285699142" watchObservedRunningTime="2025-12-06 15:46:56.774999697 +0000 UTC m=+895.308354078" Dec 06 15:47:00 crc kubenswrapper[5003]: I1206 15:47:00.882455 5003 generic.go:334] "Generic (PLEG): container finished" 
podID="395612ce-6ba7-4b60-822c-dbae3eea5e7f" containerID="2b6e2ae7a0c1ac57bab3ec5855e8f378740fc7b3e1a540e82bc82e5640645b29" exitCode=0 Dec 06 15:47:00 crc kubenswrapper[5003]: I1206 15:47:00.882558 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fs94m" event={"ID":"395612ce-6ba7-4b60-822c-dbae3eea5e7f","Type":"ContainerDied","Data":"2b6e2ae7a0c1ac57bab3ec5855e8f378740fc7b3e1a540e82bc82e5640645b29"} Dec 06 15:47:00 crc kubenswrapper[5003]: I1206 15:47:00.884424 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-2jzmn" event={"ID":"8d4e8c3d-e37b-4489-bd61-84af9e792de1","Type":"ContainerStarted","Data":"bb44f71d02fdef9940df81fa576969132c6b0bcedb6908eefc8cdabad6744bfe"} Dec 06 15:47:00 crc kubenswrapper[5003]: I1206 15:47:00.884638 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-2jzmn" Dec 06 15:47:00 crc kubenswrapper[5003]: I1206 15:47:00.930037 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-2jzmn" podStartSLOduration=2.452331559 podStartE2EDuration="12.930012492s" podCreationTimestamp="2025-12-06 15:46:48 +0000 UTC" firstStartedPulling="2025-12-06 15:46:49.77159982 +0000 UTC m=+888.304954191" lastFinishedPulling="2025-12-06 15:47:00.249280743 +0000 UTC m=+898.782635124" observedRunningTime="2025-12-06 15:47:00.926132955 +0000 UTC m=+899.459487356" watchObservedRunningTime="2025-12-06 15:47:00.930012492 +0000 UTC m=+899.463366883" Dec 06 15:47:01 crc kubenswrapper[5003]: I1206 15:47:01.893000 5003 generic.go:334] "Generic (PLEG): container finished" podID="395612ce-6ba7-4b60-822c-dbae3eea5e7f" containerID="3dcc93fbc2099f48def6fdc0029a330dd8c5bb9acbd526b55250be4a0a2db26f" exitCode=0 Dec 06 15:47:01 crc kubenswrapper[5003]: I1206 15:47:01.893098 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fs94m" event={"ID":"395612ce-6ba7-4b60-822c-dbae3eea5e7f","Type":"ContainerDied","Data":"3dcc93fbc2099f48def6fdc0029a330dd8c5bb9acbd526b55250be4a0a2db26f"} Dec 06 15:47:02 crc kubenswrapper[5003]: I1206 15:47:02.900890 5003 generic.go:334] "Generic (PLEG): container finished" podID="395612ce-6ba7-4b60-822c-dbae3eea5e7f" containerID="711f372c99f035eab5121b1683a40c480481d640593cda5c2b9e73697b8bd8dd" exitCode=0 Dec 06 15:47:02 crc kubenswrapper[5003]: I1206 15:47:02.900956 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fs94m" event={"ID":"395612ce-6ba7-4b60-822c-dbae3eea5e7f","Type":"ContainerDied","Data":"711f372c99f035eab5121b1683a40c480481d640593cda5c2b9e73697b8bd8dd"} Dec 06 15:47:03 crc kubenswrapper[5003]: I1206 15:47:03.911019 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fs94m" event={"ID":"395612ce-6ba7-4b60-822c-dbae3eea5e7f","Type":"ContainerStarted","Data":"21b6a5c983cb7107733e4b3a8992bfd339e7ae895e9b60faa90074fdf03d1433"} Dec 06 15:47:03 crc kubenswrapper[5003]: I1206 15:47:03.911343 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fs94m" event={"ID":"395612ce-6ba7-4b60-822c-dbae3eea5e7f","Type":"ContainerStarted","Data":"b131a0e5b44be34d2e67bcdce9f7f5da5e8e1a20c259e8bb15bdb0eb6d3baf3f"} Dec 06 15:47:03 crc kubenswrapper[5003]: I1206 15:47:03.911361 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-fs94m" Dec 06 15:47:03 crc kubenswrapper[5003]: I1206 15:47:03.911372 
5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fs94m" event={"ID":"395612ce-6ba7-4b60-822c-dbae3eea5e7f","Type":"ContainerStarted","Data":"3effacbef727993f534043d84d800b3d384938e4f8e9a303f017798444ae5af7"} Dec 06 15:47:03 crc kubenswrapper[5003]: I1206 15:47:03.911383 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fs94m" event={"ID":"395612ce-6ba7-4b60-822c-dbae3eea5e7f","Type":"ContainerStarted","Data":"ba7ac80ba59e85b784c4fb896d13f1b668bdcc4bb701f35cf0b1444f2bf097d6"} Dec 06 15:47:03 crc kubenswrapper[5003]: I1206 15:47:03.911393 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fs94m" event={"ID":"395612ce-6ba7-4b60-822c-dbae3eea5e7f","Type":"ContainerStarted","Data":"a12dec5f0ca10021dccab1d9cf624c146bbaf6ac01f04db21ded02b298e31f3f"} Dec 06 15:47:03 crc kubenswrapper[5003]: I1206 15:47:03.911403 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fs94m" event={"ID":"395612ce-6ba7-4b60-822c-dbae3eea5e7f","Type":"ContainerStarted","Data":"09cfb895904a474affa2082b39800c327c5de494c402c1bb2d3dfc82462adba7"} Dec 06 15:47:03 crc kubenswrapper[5003]: I1206 15:47:03.936715 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-fs94m" podStartSLOduration=6.099698389 podStartE2EDuration="15.936692801s" podCreationTimestamp="2025-12-06 15:46:48 +0000 UTC" firstStartedPulling="2025-12-06 15:46:50.430793943 +0000 UTC m=+888.964148324" lastFinishedPulling="2025-12-06 15:47:00.267788345 +0000 UTC m=+898.801142736" observedRunningTime="2025-12-06 15:47:03.93378487 +0000 UTC m=+902.467139311" watchObservedRunningTime="2025-12-06 15:47:03.936692801 +0000 UTC m=+902.470047182" Dec 06 15:47:04 crc kubenswrapper[5003]: I1206 15:47:04.880581 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-fs94m" Dec 06 15:47:04 crc kubenswrapper[5003]: I1206 15:47:04.917903 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-fs94m" Dec 06 15:47:09 crc kubenswrapper[5003]: I1206 15:47:09.381113 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-2ddqw" Dec 06 15:47:10 crc kubenswrapper[5003]: I1206 15:47:10.851341 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-vtb75" Dec 06 15:47:13 crc kubenswrapper[5003]: I1206 15:47:13.471423 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mssvp"] Dec 06 15:47:13 crc kubenswrapper[5003]: I1206 15:47:13.473247 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mssvp" Dec 06 15:47:13 crc kubenswrapper[5003]: I1206 15:47:13.485834 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mssvp"] Dec 06 15:47:13 crc kubenswrapper[5003]: I1206 15:47:13.646571 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/167f8562-1584-47b3-9f2f-3a3f8239bbe5-catalog-content\") pod \"certified-operators-mssvp\" (UID: \"167f8562-1584-47b3-9f2f-3a3f8239bbe5\") " pod="openshift-marketplace/certified-operators-mssvp" Dec 06 15:47:13 crc kubenswrapper[5003]: I1206 15:47:13.646639 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ntz9t\" (UniqueName: \"kubernetes.io/projected/167f8562-1584-47b3-9f2f-3a3f8239bbe5-kube-api-access-ntz9t\") pod \"certified-operators-mssvp\" (UID: \"167f8562-1584-47b3-9f2f-3a3f8239bbe5\") " pod="openshift-marketplace/certified-operators-mssvp" Dec 06 15:47:13 crc kubenswrapper[5003]: I1206 15:47:13.646680 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/167f8562-1584-47b3-9f2f-3a3f8239bbe5-utilities\") pod \"certified-operators-mssvp\" (UID: \"167f8562-1584-47b3-9f2f-3a3f8239bbe5\") " pod="openshift-marketplace/certified-operators-mssvp" Dec 06 15:47:13 crc kubenswrapper[5003]: I1206 15:47:13.747506 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/167f8562-1584-47b3-9f2f-3a3f8239bbe5-catalog-content\") pod \"certified-operators-mssvp\" (UID: \"167f8562-1584-47b3-9f2f-3a3f8239bbe5\") " pod="openshift-marketplace/certified-operators-mssvp" Dec 06 15:47:13 crc kubenswrapper[5003]: I1206 15:47:13.747577 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ntz9t\" (UniqueName: \"kubernetes.io/projected/167f8562-1584-47b3-9f2f-3a3f8239bbe5-kube-api-access-ntz9t\") pod \"certified-operators-mssvp\" (UID: \"167f8562-1584-47b3-9f2f-3a3f8239bbe5\") " pod="openshift-marketplace/certified-operators-mssvp" Dec 06 15:47:13 crc kubenswrapper[5003]: I1206 15:47:13.747613 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/167f8562-1584-47b3-9f2f-3a3f8239bbe5-utilities\") pod \"certified-operators-mssvp\" (UID: \"167f8562-1584-47b3-9f2f-3a3f8239bbe5\") " pod="openshift-marketplace/certified-operators-mssvp" Dec 06 15:47:13 crc kubenswrapper[5003]: I1206 15:47:13.748397 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/167f8562-1584-47b3-9f2f-3a3f8239bbe5-catalog-content\") pod \"certified-operators-mssvp\" (UID: \"167f8562-1584-47b3-9f2f-3a3f8239bbe5\") " pod="openshift-marketplace/certified-operators-mssvp" Dec 06 15:47:13 crc kubenswrapper[5003]: I1206 15:47:13.748434 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/167f8562-1584-47b3-9f2f-3a3f8239bbe5-utilities\") pod \"certified-operators-mssvp\" (UID: \"167f8562-1584-47b3-9f2f-3a3f8239bbe5\") " pod="openshift-marketplace/certified-operators-mssvp" Dec 06 15:47:13 crc kubenswrapper[5003]: I1206 15:47:13.770121 5003 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-ntz9t\" (UniqueName: \"kubernetes.io/projected/167f8562-1584-47b3-9f2f-3a3f8239bbe5-kube-api-access-ntz9t\") pod \"certified-operators-mssvp\" (UID: \"167f8562-1584-47b3-9f2f-3a3f8239bbe5\") " pod="openshift-marketplace/certified-operators-mssvp" Dec 06 15:47:13 crc kubenswrapper[5003]: I1206 15:47:13.793243 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mssvp" Dec 06 15:47:14 crc kubenswrapper[5003]: I1206 15:47:14.320609 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mssvp"] Dec 06 15:47:14 crc kubenswrapper[5003]: I1206 15:47:14.980714 5003 generic.go:334] "Generic (PLEG): container finished" podID="167f8562-1584-47b3-9f2f-3a3f8239bbe5" containerID="85a5f580335e0e9e3edcd463e5a831a7514c9daef9f437cf14e639ed11b08a18" exitCode=0 Dec 06 15:47:14 crc kubenswrapper[5003]: I1206 15:47:14.980783 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mssvp" event={"ID":"167f8562-1584-47b3-9f2f-3a3f8239bbe5","Type":"ContainerDied","Data":"85a5f580335e0e9e3edcd463e5a831a7514c9daef9f437cf14e639ed11b08a18"} Dec 06 15:47:14 crc kubenswrapper[5003]: I1206 15:47:14.981028 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mssvp" event={"ID":"167f8562-1584-47b3-9f2f-3a3f8239bbe5","Type":"ContainerStarted","Data":"d4ea6f9dc2edca6fd99bc02747f1eb1627b50c35449e8ee64b6556e9f79eb376"} Dec 06 15:47:17 crc kubenswrapper[5003]: I1206 15:47:17.999087 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mssvp" event={"ID":"167f8562-1584-47b3-9f2f-3a3f8239bbe5","Type":"ContainerStarted","Data":"be01cf79c52b72235964bcff4c102061b358310fd2e2a1a63bf631e0e64ab3f5"} Dec 06 15:47:18 crc kubenswrapper[5003]: I1206 15:47:18.603883 5003 patch_prober.go:28] interesting pod/machine-config-daemon-w25db container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 15:47:18 crc kubenswrapper[5003]: I1206 15:47:18.604238 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 15:47:19 crc kubenswrapper[5003]: I1206 15:47:18.855802 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-index-bgm72"] Dec 06 15:47:19 crc kubenswrapper[5003]: I1206 15:47:18.856595 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-bgm72" Dec 06 15:47:19 crc kubenswrapper[5003]: I1206 15:47:18.858730 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 06 15:47:19 crc kubenswrapper[5003]: I1206 15:47:18.858761 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-index-dockercfg-z6j2m" Dec 06 15:47:19 crc kubenswrapper[5003]: I1206 15:47:18.859437 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 06 15:47:19 crc kubenswrapper[5003]: I1206 15:47:18.866854 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-bgm72"] Dec 06 15:47:19 crc kubenswrapper[5003]: I1206 15:47:19.008683 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhfqh\" (UniqueName: \"kubernetes.io/projected/093c76e8-0168-4699-bbdc-93ce1cfcd465-kube-api-access-mhfqh\") pod \"mariadb-operator-index-bgm72\" (UID: \"093c76e8-0168-4699-bbdc-93ce1cfcd465\") " pod="openstack-operators/mariadb-operator-index-bgm72" Dec 06 15:47:19 crc kubenswrapper[5003]: I1206 15:47:19.109544 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhfqh\" (UniqueName: \"kubernetes.io/projected/093c76e8-0168-4699-bbdc-93ce1cfcd465-kube-api-access-mhfqh\") pod \"mariadb-operator-index-bgm72\" (UID: \"093c76e8-0168-4699-bbdc-93ce1cfcd465\") " pod="openstack-operators/mariadb-operator-index-bgm72" Dec 06 15:47:19 crc kubenswrapper[5003]: I1206 15:47:19.132038 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhfqh\" (UniqueName: \"kubernetes.io/projected/093c76e8-0168-4699-bbdc-93ce1cfcd465-kube-api-access-mhfqh\") pod \"mariadb-operator-index-bgm72\" (UID: \"093c76e8-0168-4699-bbdc-93ce1cfcd465\") " pod="openstack-operators/mariadb-operator-index-bgm72" Dec 06 15:47:19 crc kubenswrapper[5003]: I1206 15:47:19.272182 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-2jzmn" Dec 06 15:47:19 crc kubenswrapper[5003]: I1206 15:47:19.410512 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-bgm72" Dec 06 15:47:19 crc kubenswrapper[5003]: I1206 15:47:19.886825 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-fs94m" Dec 06 15:47:20 crc kubenswrapper[5003]: I1206 15:47:20.003747 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-bgm72"] Dec 06 15:47:20 crc kubenswrapper[5003]: W1206 15:47:20.008827 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod093c76e8_0168_4699_bbdc_93ce1cfcd465.slice/crio-1f4c5009f611f653e1745ec04b49547c8aa3e66bfb742baae261f3d1e600c19b WatchSource:0}: Error finding container 1f4c5009f611f653e1745ec04b49547c8aa3e66bfb742baae261f3d1e600c19b: Status 404 returned error can't find the container with id 1f4c5009f611f653e1745ec04b49547c8aa3e66bfb742baae261f3d1e600c19b Dec 06 15:47:20 crc kubenswrapper[5003]: I1206 15:47:20.014336 5003 generic.go:334] "Generic (PLEG): container finished" podID="167f8562-1584-47b3-9f2f-3a3f8239bbe5" containerID="be01cf79c52b72235964bcff4c102061b358310fd2e2a1a63bf631e0e64ab3f5" exitCode=0 Dec 06 15:47:20 crc kubenswrapper[5003]: I1206 15:47:20.014384 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mssvp" event={"ID":"167f8562-1584-47b3-9f2f-3a3f8239bbe5","Type":"ContainerDied","Data":"be01cf79c52b72235964bcff4c102061b358310fd2e2a1a63bf631e0e64ab3f5"} Dec 06 15:47:21 crc kubenswrapper[5003]: I1206 15:47:21.022967 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mssvp" event={"ID":"167f8562-1584-47b3-9f2f-3a3f8239bbe5","Type":"ContainerStarted","Data":"eec956263bc49ca09ec19fb009ceaf0fa5fdda4a5ec1ae86b7b8e00b6eaef777"} Dec 06 15:47:21 crc kubenswrapper[5003]: I1206 15:47:21.025333 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-bgm72" event={"ID":"093c76e8-0168-4699-bbdc-93ce1cfcd465","Type":"ContainerStarted","Data":"1f4c5009f611f653e1745ec04b49547c8aa3e66bfb742baae261f3d1e600c19b"} Dec 06 15:47:21 crc kubenswrapper[5003]: I1206 15:47:21.076224 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mssvp" podStartSLOduration=2.596402962 podStartE2EDuration="8.076201433s" podCreationTimestamp="2025-12-06 15:47:13 +0000 UTC" firstStartedPulling="2025-12-06 15:47:14.982186873 +0000 UTC m=+913.515541264" lastFinishedPulling="2025-12-06 15:47:20.461985354 +0000 UTC m=+918.995339735" observedRunningTime="2025-12-06 15:47:21.071229816 +0000 UTC m=+919.604584207" watchObservedRunningTime="2025-12-06 15:47:21.076201433 +0000 UTC m=+919.609555814" Dec 06 15:47:22 crc kubenswrapper[5003]: I1206 15:47:22.031132 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-bgm72" event={"ID":"093c76e8-0168-4699-bbdc-93ce1cfcd465","Type":"ContainerStarted","Data":"8e9e1b1223a1aab29d2299e58a7033ae8291acfa874a5b5439d1b0b08d9083e5"} Dec 06 15:47:22 crc kubenswrapper[5003]: I1206 15:47:22.048170 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-index-bgm72" podStartSLOduration=3.090429026 podStartE2EDuration="4.048152542s" podCreationTimestamp="2025-12-06 15:47:18 +0000 UTC" firstStartedPulling="2025-12-06 15:47:20.012300031 +0000 UTC m=+918.545654412" 
lastFinishedPulling="2025-12-06 15:47:20.970023547 +0000 UTC m=+919.503377928" observedRunningTime="2025-12-06 15:47:22.046396453 +0000 UTC m=+920.579750854" watchObservedRunningTime="2025-12-06 15:47:22.048152542 +0000 UTC m=+920.581506923" Dec 06 15:47:23 crc kubenswrapper[5003]: I1206 15:47:23.793875 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mssvp" Dec 06 15:47:23 crc kubenswrapper[5003]: I1206 15:47:23.793918 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mssvp" Dec 06 15:47:23 crc kubenswrapper[5003]: I1206 15:47:23.842901 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mssvp" Dec 06 15:47:24 crc kubenswrapper[5003]: I1206 15:47:24.215069 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-bgm72"] Dec 06 15:47:24 crc kubenswrapper[5003]: I1206 15:47:24.215421 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/mariadb-operator-index-bgm72" podUID="093c76e8-0168-4699-bbdc-93ce1cfcd465" containerName="registry-server" containerID="cri-o://8e9e1b1223a1aab29d2299e58a7033ae8291acfa874a5b5439d1b0b08d9083e5" gracePeriod=2 Dec 06 15:47:24 crc kubenswrapper[5003]: I1206 15:47:24.868227 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-index-42pp6"] Dec 06 15:47:24 crc kubenswrapper[5003]: I1206 15:47:24.871018 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-42pp6" Dec 06 15:47:24 crc kubenswrapper[5003]: I1206 15:47:24.875319 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-42pp6"] Dec 06 15:47:25 crc kubenswrapper[5003]: I1206 15:47:25.009207 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8xnjh\" (UniqueName: \"kubernetes.io/projected/815d4582-e47b-4d39-9c18-9886ba2a8e7d-kube-api-access-8xnjh\") pod \"mariadb-operator-index-42pp6\" (UID: \"815d4582-e47b-4d39-9c18-9886ba2a8e7d\") " pod="openstack-operators/mariadb-operator-index-42pp6" Dec 06 15:47:25 crc kubenswrapper[5003]: I1206 15:47:25.108430 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-bgm72" Dec 06 15:47:25 crc kubenswrapper[5003]: I1206 15:47:25.110865 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8xnjh\" (UniqueName: \"kubernetes.io/projected/815d4582-e47b-4d39-9c18-9886ba2a8e7d-kube-api-access-8xnjh\") pod \"mariadb-operator-index-42pp6\" (UID: \"815d4582-e47b-4d39-9c18-9886ba2a8e7d\") " pod="openstack-operators/mariadb-operator-index-42pp6" Dec 06 15:47:25 crc kubenswrapper[5003]: I1206 15:47:25.136199 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8xnjh\" (UniqueName: \"kubernetes.io/projected/815d4582-e47b-4d39-9c18-9886ba2a8e7d-kube-api-access-8xnjh\") pod \"mariadb-operator-index-42pp6\" (UID: \"815d4582-e47b-4d39-9c18-9886ba2a8e7d\") " pod="openstack-operators/mariadb-operator-index-42pp6" Dec 06 15:47:25 crc kubenswrapper[5003]: I1206 15:47:25.195663 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-42pp6" Dec 06 15:47:25 crc kubenswrapper[5003]: I1206 15:47:25.211413 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mhfqh\" (UniqueName: \"kubernetes.io/projected/093c76e8-0168-4699-bbdc-93ce1cfcd465-kube-api-access-mhfqh\") pod \"093c76e8-0168-4699-bbdc-93ce1cfcd465\" (UID: \"093c76e8-0168-4699-bbdc-93ce1cfcd465\") " Dec 06 15:47:25 crc kubenswrapper[5003]: I1206 15:47:25.214101 5003 generic.go:334] "Generic (PLEG): container finished" podID="093c76e8-0168-4699-bbdc-93ce1cfcd465" containerID="8e9e1b1223a1aab29d2299e58a7033ae8291acfa874a5b5439d1b0b08d9083e5" exitCode=0 Dec 06 15:47:25 crc kubenswrapper[5003]: I1206 15:47:25.214139 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-bgm72" event={"ID":"093c76e8-0168-4699-bbdc-93ce1cfcd465","Type":"ContainerDied","Data":"8e9e1b1223a1aab29d2299e58a7033ae8291acfa874a5b5439d1b0b08d9083e5"} Dec 06 15:47:25 crc kubenswrapper[5003]: I1206 15:47:25.214157 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-bgm72" Dec 06 15:47:25 crc kubenswrapper[5003]: I1206 15:47:25.214175 5003 scope.go:117] "RemoveContainer" containerID="8e9e1b1223a1aab29d2299e58a7033ae8291acfa874a5b5439d1b0b08d9083e5" Dec 06 15:47:25 crc kubenswrapper[5003]: I1206 15:47:25.214163 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-bgm72" event={"ID":"093c76e8-0168-4699-bbdc-93ce1cfcd465","Type":"ContainerDied","Data":"1f4c5009f611f653e1745ec04b49547c8aa3e66bfb742baae261f3d1e600c19b"} Dec 06 15:47:25 crc kubenswrapper[5003]: I1206 15:47:25.217873 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/093c76e8-0168-4699-bbdc-93ce1cfcd465-kube-api-access-mhfqh" (OuterVolumeSpecName: "kube-api-access-mhfqh") pod "093c76e8-0168-4699-bbdc-93ce1cfcd465" (UID: "093c76e8-0168-4699-bbdc-93ce1cfcd465"). InnerVolumeSpecName "kube-api-access-mhfqh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:47:25 crc kubenswrapper[5003]: I1206 15:47:25.247342 5003 scope.go:117] "RemoveContainer" containerID="8e9e1b1223a1aab29d2299e58a7033ae8291acfa874a5b5439d1b0b08d9083e5" Dec 06 15:47:25 crc kubenswrapper[5003]: E1206 15:47:25.248228 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e9e1b1223a1aab29d2299e58a7033ae8291acfa874a5b5439d1b0b08d9083e5\": container with ID starting with 8e9e1b1223a1aab29d2299e58a7033ae8291acfa874a5b5439d1b0b08d9083e5 not found: ID does not exist" containerID="8e9e1b1223a1aab29d2299e58a7033ae8291acfa874a5b5439d1b0b08d9083e5" Dec 06 15:47:25 crc kubenswrapper[5003]: I1206 15:47:25.248277 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e9e1b1223a1aab29d2299e58a7033ae8291acfa874a5b5439d1b0b08d9083e5"} err="failed to get container status \"8e9e1b1223a1aab29d2299e58a7033ae8291acfa874a5b5439d1b0b08d9083e5\": rpc error: code = NotFound desc = could not find container \"8e9e1b1223a1aab29d2299e58a7033ae8291acfa874a5b5439d1b0b08d9083e5\": container with ID starting with 8e9e1b1223a1aab29d2299e58a7033ae8291acfa874a5b5439d1b0b08d9083e5 not found: ID does not exist" Dec 06 15:47:25 crc kubenswrapper[5003]: I1206 15:47:25.313449 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mhfqh\" (UniqueName: \"kubernetes.io/projected/093c76e8-0168-4699-bbdc-93ce1cfcd465-kube-api-access-mhfqh\") on node \"crc\" DevicePath \"\"" Dec 06 15:47:25 crc kubenswrapper[5003]: I1206 15:47:25.555542 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-bgm72"] Dec 06 15:47:25 crc kubenswrapper[5003]: I1206 15:47:25.560242 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/mariadb-operator-index-bgm72"] Dec 06 15:47:25 crc kubenswrapper[5003]: I1206 15:47:25.620199 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-42pp6"] Dec 06 15:47:25 crc kubenswrapper[5003]: W1206 15:47:25.621641 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod815d4582_e47b_4d39_9c18_9886ba2a8e7d.slice/crio-d1f10e00024ee189da9cd9a73741c160e5128d9aba1226208c36c4c1486949f2 WatchSource:0}: Error finding container d1f10e00024ee189da9cd9a73741c160e5128d9aba1226208c36c4c1486949f2: Status 404 returned error can't find the container with id d1f10e00024ee189da9cd9a73741c160e5128d9aba1226208c36c4c1486949f2 Dec 06 15:47:25 crc kubenswrapper[5003]: I1206 15:47:25.719795 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="093c76e8-0168-4699-bbdc-93ce1cfcd465" path="/var/lib/kubelet/pods/093c76e8-0168-4699-bbdc-93ce1cfcd465/volumes" Dec 06 15:47:26 crc kubenswrapper[5003]: I1206 15:47:26.229719 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-42pp6" event={"ID":"815d4582-e47b-4d39-9c18-9886ba2a8e7d","Type":"ContainerStarted","Data":"7e81de4adb97b13184a2f682fe0edc64fcb44c061e4cb9cab407c46b36a311a9"} Dec 06 15:47:26 crc kubenswrapper[5003]: I1206 15:47:26.230112 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-42pp6" event={"ID":"815d4582-e47b-4d39-9c18-9886ba2a8e7d","Type":"ContainerStarted","Data":"d1f10e00024ee189da9cd9a73741c160e5128d9aba1226208c36c4c1486949f2"} Dec 06 15:47:26 
crc kubenswrapper[5003]: I1206 15:47:26.258973 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-index-42pp6" podStartSLOduration=1.872675181 podStartE2EDuration="2.258938167s" podCreationTimestamp="2025-12-06 15:47:24 +0000 UTC" firstStartedPulling="2025-12-06 15:47:25.626012843 +0000 UTC m=+924.159367224" lastFinishedPulling="2025-12-06 15:47:26.012275799 +0000 UTC m=+924.545630210" observedRunningTime="2025-12-06 15:47:26.250429453 +0000 UTC m=+924.783783914" watchObservedRunningTime="2025-12-06 15:47:26.258938167 +0000 UTC m=+924.792292578" Dec 06 15:47:32 crc kubenswrapper[5003]: I1206 15:47:32.860064 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-hbn5g"] Dec 06 15:47:32 crc kubenswrapper[5003]: E1206 15:47:32.861000 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="093c76e8-0168-4699-bbdc-93ce1cfcd465" containerName="registry-server" Dec 06 15:47:32 crc kubenswrapper[5003]: I1206 15:47:32.861017 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="093c76e8-0168-4699-bbdc-93ce1cfcd465" containerName="registry-server" Dec 06 15:47:32 crc kubenswrapper[5003]: I1206 15:47:32.861182 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="093c76e8-0168-4699-bbdc-93ce1cfcd465" containerName="registry-server" Dec 06 15:47:32 crc kubenswrapper[5003]: I1206 15:47:32.862867 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hbn5g" Dec 06 15:47:32 crc kubenswrapper[5003]: I1206 15:47:32.893526 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hbn5g"] Dec 06 15:47:32 crc kubenswrapper[5003]: I1206 15:47:32.920255 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bgmjk\" (UniqueName: \"kubernetes.io/projected/c2e79cb9-e11e-43fd-be96-8b35bcff5b83-kube-api-access-bgmjk\") pod \"redhat-marketplace-hbn5g\" (UID: \"c2e79cb9-e11e-43fd-be96-8b35bcff5b83\") " pod="openshift-marketplace/redhat-marketplace-hbn5g" Dec 06 15:47:32 crc kubenswrapper[5003]: I1206 15:47:32.920325 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2e79cb9-e11e-43fd-be96-8b35bcff5b83-catalog-content\") pod \"redhat-marketplace-hbn5g\" (UID: \"c2e79cb9-e11e-43fd-be96-8b35bcff5b83\") " pod="openshift-marketplace/redhat-marketplace-hbn5g" Dec 06 15:47:32 crc kubenswrapper[5003]: I1206 15:47:32.920433 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2e79cb9-e11e-43fd-be96-8b35bcff5b83-utilities\") pod \"redhat-marketplace-hbn5g\" (UID: \"c2e79cb9-e11e-43fd-be96-8b35bcff5b83\") " pod="openshift-marketplace/redhat-marketplace-hbn5g" Dec 06 15:47:33 crc kubenswrapper[5003]: I1206 15:47:33.022189 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bgmjk\" (UniqueName: \"kubernetes.io/projected/c2e79cb9-e11e-43fd-be96-8b35bcff5b83-kube-api-access-bgmjk\") pod \"redhat-marketplace-hbn5g\" (UID: \"c2e79cb9-e11e-43fd-be96-8b35bcff5b83\") " pod="openshift-marketplace/redhat-marketplace-hbn5g" Dec 06 15:47:33 crc kubenswrapper[5003]: I1206 15:47:33.022253 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2e79cb9-e11e-43fd-be96-8b35bcff5b83-catalog-content\") pod \"redhat-marketplace-hbn5g\" (UID: \"c2e79cb9-e11e-43fd-be96-8b35bcff5b83\") " pod="openshift-marketplace/redhat-marketplace-hbn5g" Dec 06 15:47:33 crc kubenswrapper[5003]: I1206 15:47:33.022654 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2e79cb9-e11e-43fd-be96-8b35bcff5b83-utilities\") pod \"redhat-marketplace-hbn5g\" (UID: \"c2e79cb9-e11e-43fd-be96-8b35bcff5b83\") " pod="openshift-marketplace/redhat-marketplace-hbn5g" Dec 06 15:47:33 crc kubenswrapper[5003]: I1206 15:47:33.022838 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2e79cb9-e11e-43fd-be96-8b35bcff5b83-catalog-content\") pod \"redhat-marketplace-hbn5g\" (UID: \"c2e79cb9-e11e-43fd-be96-8b35bcff5b83\") " pod="openshift-marketplace/redhat-marketplace-hbn5g" Dec 06 15:47:33 crc kubenswrapper[5003]: I1206 15:47:33.023182 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2e79cb9-e11e-43fd-be96-8b35bcff5b83-utilities\") pod \"redhat-marketplace-hbn5g\" (UID: \"c2e79cb9-e11e-43fd-be96-8b35bcff5b83\") " pod="openshift-marketplace/redhat-marketplace-hbn5g" Dec 06 15:47:33 crc kubenswrapper[5003]: I1206 15:47:33.054000 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bgmjk\" (UniqueName: \"kubernetes.io/projected/c2e79cb9-e11e-43fd-be96-8b35bcff5b83-kube-api-access-bgmjk\") pod \"redhat-marketplace-hbn5g\" (UID: \"c2e79cb9-e11e-43fd-be96-8b35bcff5b83\") " pod="openshift-marketplace/redhat-marketplace-hbn5g" Dec 06 15:47:33 crc kubenswrapper[5003]: I1206 15:47:33.185500 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hbn5g" Dec 06 15:47:33 crc kubenswrapper[5003]: I1206 15:47:33.656909 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hbn5g"] Dec 06 15:47:33 crc kubenswrapper[5003]: I1206 15:47:33.836795 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mssvp" Dec 06 15:47:34 crc kubenswrapper[5003]: I1206 15:47:34.279385 5003 generic.go:334] "Generic (PLEG): container finished" podID="c2e79cb9-e11e-43fd-be96-8b35bcff5b83" containerID="04e8e2c469bbb5cc2f0f29b0e16ad91c9f374c4ecf4db668bc12599bcc0c0f26" exitCode=0 Dec 06 15:47:34 crc kubenswrapper[5003]: I1206 15:47:34.279424 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hbn5g" event={"ID":"c2e79cb9-e11e-43fd-be96-8b35bcff5b83","Type":"ContainerDied","Data":"04e8e2c469bbb5cc2f0f29b0e16ad91c9f374c4ecf4db668bc12599bcc0c0f26"} Dec 06 15:47:34 crc kubenswrapper[5003]: I1206 15:47:34.279449 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hbn5g" event={"ID":"c2e79cb9-e11e-43fd-be96-8b35bcff5b83","Type":"ContainerStarted","Data":"7ab401fae2cece9619eec1e64a94d8d894fec3f056db9d82f68c2b80aad9ce2d"} Dec 06 15:47:35 crc kubenswrapper[5003]: I1206 15:47:35.196320 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-index-42pp6" Dec 06 15:47:35 crc kubenswrapper[5003]: I1206 15:47:35.196644 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/mariadb-operator-index-42pp6" Dec 06 15:47:35 crc kubenswrapper[5003]: I1206 15:47:35.225016 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/mariadb-operator-index-42pp6" Dec 06 15:47:35 crc kubenswrapper[5003]: I1206 15:47:35.287757 5003 generic.go:334] "Generic (PLEG): container finished" podID="c2e79cb9-e11e-43fd-be96-8b35bcff5b83" containerID="09d876ccd35d588ff0624dfac7651d4d5068562e6134af51f1d507fc4604b291" exitCode=0 Dec 06 15:47:35 crc kubenswrapper[5003]: I1206 15:47:35.287883 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hbn5g" event={"ID":"c2e79cb9-e11e-43fd-be96-8b35bcff5b83","Type":"ContainerDied","Data":"09d876ccd35d588ff0624dfac7651d4d5068562e6134af51f1d507fc4604b291"} Dec 06 15:47:35 crc kubenswrapper[5003]: I1206 15:47:35.322531 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-index-42pp6" Dec 06 15:47:36 crc kubenswrapper[5003]: I1206 15:47:36.249290 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mssvp"] Dec 06 15:47:36 crc kubenswrapper[5003]: I1206 15:47:36.250102 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mssvp" podUID="167f8562-1584-47b3-9f2f-3a3f8239bbe5" containerName="registry-server" containerID="cri-o://eec956263bc49ca09ec19fb009ceaf0fa5fdda4a5ec1ae86b7b8e00b6eaef777" gracePeriod=2 Dec 06 15:47:36 crc kubenswrapper[5003]: I1206 15:47:36.296719 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hbn5g" event={"ID":"c2e79cb9-e11e-43fd-be96-8b35bcff5b83","Type":"ContainerStarted","Data":"d4dc0690ebcbe979be6f93646016aa017e57ef2496c10372863a03a8a9263787"} Dec 06 
15:47:36 crc kubenswrapper[5003]: I1206 15:47:36.657287 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mssvp" Dec 06 15:47:36 crc kubenswrapper[5003]: I1206 15:47:36.678801 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-hbn5g" podStartSLOduration=3.012594839 podStartE2EDuration="4.678781791s" podCreationTimestamp="2025-12-06 15:47:32 +0000 UTC" firstStartedPulling="2025-12-06 15:47:34.280598695 +0000 UTC m=+932.813953076" lastFinishedPulling="2025-12-06 15:47:35.946785647 +0000 UTC m=+934.480140028" observedRunningTime="2025-12-06 15:47:36.316053735 +0000 UTC m=+934.849408116" watchObservedRunningTime="2025-12-06 15:47:36.678781791 +0000 UTC m=+935.212136172" Dec 06 15:47:36 crc kubenswrapper[5003]: I1206 15:47:36.767077 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/167f8562-1584-47b3-9f2f-3a3f8239bbe5-catalog-content\") pod \"167f8562-1584-47b3-9f2f-3a3f8239bbe5\" (UID: \"167f8562-1584-47b3-9f2f-3a3f8239bbe5\") " Dec 06 15:47:36 crc kubenswrapper[5003]: I1206 15:47:36.767166 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/167f8562-1584-47b3-9f2f-3a3f8239bbe5-utilities\") pod \"167f8562-1584-47b3-9f2f-3a3f8239bbe5\" (UID: \"167f8562-1584-47b3-9f2f-3a3f8239bbe5\") " Dec 06 15:47:36 crc kubenswrapper[5003]: I1206 15:47:36.767220 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ntz9t\" (UniqueName: \"kubernetes.io/projected/167f8562-1584-47b3-9f2f-3a3f8239bbe5-kube-api-access-ntz9t\") pod \"167f8562-1584-47b3-9f2f-3a3f8239bbe5\" (UID: \"167f8562-1584-47b3-9f2f-3a3f8239bbe5\") " Dec 06 15:47:36 crc kubenswrapper[5003]: I1206 15:47:36.768053 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/167f8562-1584-47b3-9f2f-3a3f8239bbe5-utilities" (OuterVolumeSpecName: "utilities") pod "167f8562-1584-47b3-9f2f-3a3f8239bbe5" (UID: "167f8562-1584-47b3-9f2f-3a3f8239bbe5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:47:36 crc kubenswrapper[5003]: I1206 15:47:36.784363 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/167f8562-1584-47b3-9f2f-3a3f8239bbe5-kube-api-access-ntz9t" (OuterVolumeSpecName: "kube-api-access-ntz9t") pod "167f8562-1584-47b3-9f2f-3a3f8239bbe5" (UID: "167f8562-1584-47b3-9f2f-3a3f8239bbe5"). InnerVolumeSpecName "kube-api-access-ntz9t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:47:36 crc kubenswrapper[5003]: I1206 15:47:36.815395 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/167f8562-1584-47b3-9f2f-3a3f8239bbe5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "167f8562-1584-47b3-9f2f-3a3f8239bbe5" (UID: "167f8562-1584-47b3-9f2f-3a3f8239bbe5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:47:36 crc kubenswrapper[5003]: I1206 15:47:36.868696 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/167f8562-1584-47b3-9f2f-3a3f8239bbe5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 15:47:36 crc kubenswrapper[5003]: I1206 15:47:36.868740 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/167f8562-1584-47b3-9f2f-3a3f8239bbe5-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 15:47:36 crc kubenswrapper[5003]: I1206 15:47:36.868754 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ntz9t\" (UniqueName: \"kubernetes.io/projected/167f8562-1584-47b3-9f2f-3a3f8239bbe5-kube-api-access-ntz9t\") on node \"crc\" DevicePath \"\"" Dec 06 15:47:37 crc kubenswrapper[5003]: I1206 15:47:37.302681 5003 generic.go:334] "Generic (PLEG): container finished" podID="167f8562-1584-47b3-9f2f-3a3f8239bbe5" containerID="eec956263bc49ca09ec19fb009ceaf0fa5fdda4a5ec1ae86b7b8e00b6eaef777" exitCode=0 Dec 06 15:47:37 crc kubenswrapper[5003]: I1206 15:47:37.302746 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mssvp" Dec 06 15:47:37 crc kubenswrapper[5003]: I1206 15:47:37.302734 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mssvp" event={"ID":"167f8562-1584-47b3-9f2f-3a3f8239bbe5","Type":"ContainerDied","Data":"eec956263bc49ca09ec19fb009ceaf0fa5fdda4a5ec1ae86b7b8e00b6eaef777"} Dec 06 15:47:37 crc kubenswrapper[5003]: I1206 15:47:37.302864 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mssvp" event={"ID":"167f8562-1584-47b3-9f2f-3a3f8239bbe5","Type":"ContainerDied","Data":"d4ea6f9dc2edca6fd99bc02747f1eb1627b50c35449e8ee64b6556e9f79eb376"} Dec 06 15:47:37 crc kubenswrapper[5003]: I1206 15:47:37.302885 5003 scope.go:117] "RemoveContainer" containerID="eec956263bc49ca09ec19fb009ceaf0fa5fdda4a5ec1ae86b7b8e00b6eaef777" Dec 06 15:47:37 crc kubenswrapper[5003]: I1206 15:47:37.322701 5003 scope.go:117] "RemoveContainer" containerID="be01cf79c52b72235964bcff4c102061b358310fd2e2a1a63bf631e0e64ab3f5" Dec 06 15:47:37 crc kubenswrapper[5003]: I1206 15:47:37.338724 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mssvp"] Dec 06 15:47:37 crc kubenswrapper[5003]: I1206 15:47:37.340114 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mssvp"] Dec 06 15:47:37 crc kubenswrapper[5003]: I1206 15:47:37.353343 5003 scope.go:117] "RemoveContainer" containerID="85a5f580335e0e9e3edcd463e5a831a7514c9daef9f437cf14e639ed11b08a18" Dec 06 15:47:37 crc kubenswrapper[5003]: I1206 15:47:37.367847 5003 scope.go:117] "RemoveContainer" containerID="eec956263bc49ca09ec19fb009ceaf0fa5fdda4a5ec1ae86b7b8e00b6eaef777" Dec 06 15:47:37 crc kubenswrapper[5003]: E1206 15:47:37.368307 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eec956263bc49ca09ec19fb009ceaf0fa5fdda4a5ec1ae86b7b8e00b6eaef777\": container with ID starting with eec956263bc49ca09ec19fb009ceaf0fa5fdda4a5ec1ae86b7b8e00b6eaef777 not found: ID does not exist" containerID="eec956263bc49ca09ec19fb009ceaf0fa5fdda4a5ec1ae86b7b8e00b6eaef777" Dec 06 15:47:37 crc kubenswrapper[5003]: I1206 15:47:37.368362 
5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eec956263bc49ca09ec19fb009ceaf0fa5fdda4a5ec1ae86b7b8e00b6eaef777"} err="failed to get container status \"eec956263bc49ca09ec19fb009ceaf0fa5fdda4a5ec1ae86b7b8e00b6eaef777\": rpc error: code = NotFound desc = could not find container \"eec956263bc49ca09ec19fb009ceaf0fa5fdda4a5ec1ae86b7b8e00b6eaef777\": container with ID starting with eec956263bc49ca09ec19fb009ceaf0fa5fdda4a5ec1ae86b7b8e00b6eaef777 not found: ID does not exist" Dec 06 15:47:37 crc kubenswrapper[5003]: I1206 15:47:37.368396 5003 scope.go:117] "RemoveContainer" containerID="be01cf79c52b72235964bcff4c102061b358310fd2e2a1a63bf631e0e64ab3f5" Dec 06 15:47:37 crc kubenswrapper[5003]: E1206 15:47:37.369083 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be01cf79c52b72235964bcff4c102061b358310fd2e2a1a63bf631e0e64ab3f5\": container with ID starting with be01cf79c52b72235964bcff4c102061b358310fd2e2a1a63bf631e0e64ab3f5 not found: ID does not exist" containerID="be01cf79c52b72235964bcff4c102061b358310fd2e2a1a63bf631e0e64ab3f5" Dec 06 15:47:37 crc kubenswrapper[5003]: I1206 15:47:37.369118 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be01cf79c52b72235964bcff4c102061b358310fd2e2a1a63bf631e0e64ab3f5"} err="failed to get container status \"be01cf79c52b72235964bcff4c102061b358310fd2e2a1a63bf631e0e64ab3f5\": rpc error: code = NotFound desc = could not find container \"be01cf79c52b72235964bcff4c102061b358310fd2e2a1a63bf631e0e64ab3f5\": container with ID starting with be01cf79c52b72235964bcff4c102061b358310fd2e2a1a63bf631e0e64ab3f5 not found: ID does not exist" Dec 06 15:47:37 crc kubenswrapper[5003]: I1206 15:47:37.369140 5003 scope.go:117] "RemoveContainer" containerID="85a5f580335e0e9e3edcd463e5a831a7514c9daef9f437cf14e639ed11b08a18" Dec 06 15:47:37 crc kubenswrapper[5003]: E1206 15:47:37.369341 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85a5f580335e0e9e3edcd463e5a831a7514c9daef9f437cf14e639ed11b08a18\": container with ID starting with 85a5f580335e0e9e3edcd463e5a831a7514c9daef9f437cf14e639ed11b08a18 not found: ID does not exist" containerID="85a5f580335e0e9e3edcd463e5a831a7514c9daef9f437cf14e639ed11b08a18" Dec 06 15:47:37 crc kubenswrapper[5003]: I1206 15:47:37.369370 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85a5f580335e0e9e3edcd463e5a831a7514c9daef9f437cf14e639ed11b08a18"} err="failed to get container status \"85a5f580335e0e9e3edcd463e5a831a7514c9daef9f437cf14e639ed11b08a18\": rpc error: code = NotFound desc = could not find container \"85a5f580335e0e9e3edcd463e5a831a7514c9daef9f437cf14e639ed11b08a18\": container with ID starting with 85a5f580335e0e9e3edcd463e5a831a7514c9daef9f437cf14e639ed11b08a18 not found: ID does not exist" Dec 06 15:47:37 crc kubenswrapper[5003]: I1206 15:47:37.724621 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="167f8562-1584-47b3-9f2f-3a3f8239bbe5" path="/var/lib/kubelet/pods/167f8562-1584-47b3-9f2f-3a3f8239bbe5/volumes" Dec 06 15:47:43 crc kubenswrapper[5003]: I1206 15:47:43.186146 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-hbn5g" Dec 06 15:47:43 crc kubenswrapper[5003]: I1206 15:47:43.186771 5003 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-hbn5g" Dec 06 15:47:43 crc kubenswrapper[5003]: I1206 15:47:43.227238 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-hbn5g" Dec 06 15:47:43 crc kubenswrapper[5003]: I1206 15:47:43.440784 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-hbn5g" Dec 06 15:47:44 crc kubenswrapper[5003]: I1206 15:47:44.456656 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hbn5g"] Dec 06 15:47:45 crc kubenswrapper[5003]: I1206 15:47:45.371992 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-hbn5g" podUID="c2e79cb9-e11e-43fd-be96-8b35bcff5b83" containerName="registry-server" containerID="cri-o://d4dc0690ebcbe979be6f93646016aa017e57ef2496c10372863a03a8a9263787" gracePeriod=2 Dec 06 15:47:45 crc kubenswrapper[5003]: I1206 15:47:45.771312 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hbn5g" Dec 06 15:47:45 crc kubenswrapper[5003]: I1206 15:47:45.893219 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2e79cb9-e11e-43fd-be96-8b35bcff5b83-catalog-content\") pod \"c2e79cb9-e11e-43fd-be96-8b35bcff5b83\" (UID: \"c2e79cb9-e11e-43fd-be96-8b35bcff5b83\") " Dec 06 15:47:45 crc kubenswrapper[5003]: I1206 15:47:45.893277 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bgmjk\" (UniqueName: \"kubernetes.io/projected/c2e79cb9-e11e-43fd-be96-8b35bcff5b83-kube-api-access-bgmjk\") pod \"c2e79cb9-e11e-43fd-be96-8b35bcff5b83\" (UID: \"c2e79cb9-e11e-43fd-be96-8b35bcff5b83\") " Dec 06 15:47:45 crc kubenswrapper[5003]: I1206 15:47:45.893337 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2e79cb9-e11e-43fd-be96-8b35bcff5b83-utilities\") pod \"c2e79cb9-e11e-43fd-be96-8b35bcff5b83\" (UID: \"c2e79cb9-e11e-43fd-be96-8b35bcff5b83\") " Dec 06 15:47:45 crc kubenswrapper[5003]: I1206 15:47:45.894480 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c2e79cb9-e11e-43fd-be96-8b35bcff5b83-utilities" (OuterVolumeSpecName: "utilities") pod "c2e79cb9-e11e-43fd-be96-8b35bcff5b83" (UID: "c2e79cb9-e11e-43fd-be96-8b35bcff5b83"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:47:45 crc kubenswrapper[5003]: I1206 15:47:45.901780 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2e79cb9-e11e-43fd-be96-8b35bcff5b83-kube-api-access-bgmjk" (OuterVolumeSpecName: "kube-api-access-bgmjk") pod "c2e79cb9-e11e-43fd-be96-8b35bcff5b83" (UID: "c2e79cb9-e11e-43fd-be96-8b35bcff5b83"). InnerVolumeSpecName "kube-api-access-bgmjk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:47:45 crc kubenswrapper[5003]: I1206 15:47:45.995220 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bgmjk\" (UniqueName: \"kubernetes.io/projected/c2e79cb9-e11e-43fd-be96-8b35bcff5b83-kube-api-access-bgmjk\") on node \"crc\" DevicePath \"\"" Dec 06 15:47:45 crc kubenswrapper[5003]: I1206 15:47:45.995281 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2e79cb9-e11e-43fd-be96-8b35bcff5b83-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 15:47:46 crc kubenswrapper[5003]: I1206 15:47:46.248038 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c2e79cb9-e11e-43fd-be96-8b35bcff5b83-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c2e79cb9-e11e-43fd-be96-8b35bcff5b83" (UID: "c2e79cb9-e11e-43fd-be96-8b35bcff5b83"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:47:46 crc kubenswrapper[5003]: I1206 15:47:46.299809 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2e79cb9-e11e-43fd-be96-8b35bcff5b83-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 15:47:46 crc kubenswrapper[5003]: I1206 15:47:46.380252 5003 generic.go:334] "Generic (PLEG): container finished" podID="c2e79cb9-e11e-43fd-be96-8b35bcff5b83" containerID="d4dc0690ebcbe979be6f93646016aa017e57ef2496c10372863a03a8a9263787" exitCode=0 Dec 06 15:47:46 crc kubenswrapper[5003]: I1206 15:47:46.380307 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hbn5g" event={"ID":"c2e79cb9-e11e-43fd-be96-8b35bcff5b83","Type":"ContainerDied","Data":"d4dc0690ebcbe979be6f93646016aa017e57ef2496c10372863a03a8a9263787"} Dec 06 15:47:46 crc kubenswrapper[5003]: I1206 15:47:46.380352 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hbn5g" Dec 06 15:47:46 crc kubenswrapper[5003]: I1206 15:47:46.380379 5003 scope.go:117] "RemoveContainer" containerID="d4dc0690ebcbe979be6f93646016aa017e57ef2496c10372863a03a8a9263787" Dec 06 15:47:46 crc kubenswrapper[5003]: I1206 15:47:46.380365 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hbn5g" event={"ID":"c2e79cb9-e11e-43fd-be96-8b35bcff5b83","Type":"ContainerDied","Data":"7ab401fae2cece9619eec1e64a94d8d894fec3f056db9d82f68c2b80aad9ce2d"} Dec 06 15:47:46 crc kubenswrapper[5003]: I1206 15:47:46.401979 5003 scope.go:117] "RemoveContainer" containerID="09d876ccd35d588ff0624dfac7651d4d5068562e6134af51f1d507fc4604b291" Dec 06 15:47:46 crc kubenswrapper[5003]: I1206 15:47:46.423235 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hbn5g"] Dec 06 15:47:46 crc kubenswrapper[5003]: I1206 15:47:46.430205 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-hbn5g"] Dec 06 15:47:46 crc kubenswrapper[5003]: I1206 15:47:46.432066 5003 scope.go:117] "RemoveContainer" containerID="04e8e2c469bbb5cc2f0f29b0e16ad91c9f374c4ecf4db668bc12599bcc0c0f26" Dec 06 15:47:46 crc kubenswrapper[5003]: I1206 15:47:46.458837 5003 scope.go:117] "RemoveContainer" containerID="d4dc0690ebcbe979be6f93646016aa017e57ef2496c10372863a03a8a9263787" Dec 06 15:47:46 crc kubenswrapper[5003]: E1206 15:47:46.459374 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4dc0690ebcbe979be6f93646016aa017e57ef2496c10372863a03a8a9263787\": container with ID starting with d4dc0690ebcbe979be6f93646016aa017e57ef2496c10372863a03a8a9263787 not found: ID does not exist" containerID="d4dc0690ebcbe979be6f93646016aa017e57ef2496c10372863a03a8a9263787" Dec 06 15:47:46 crc kubenswrapper[5003]: I1206 15:47:46.459425 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4dc0690ebcbe979be6f93646016aa017e57ef2496c10372863a03a8a9263787"} err="failed to get container status \"d4dc0690ebcbe979be6f93646016aa017e57ef2496c10372863a03a8a9263787\": rpc error: code = NotFound desc = could not find container \"d4dc0690ebcbe979be6f93646016aa017e57ef2496c10372863a03a8a9263787\": container with ID starting with d4dc0690ebcbe979be6f93646016aa017e57ef2496c10372863a03a8a9263787 not found: ID does not exist" Dec 06 15:47:46 crc kubenswrapper[5003]: I1206 15:47:46.459464 5003 scope.go:117] "RemoveContainer" containerID="09d876ccd35d588ff0624dfac7651d4d5068562e6134af51f1d507fc4604b291" Dec 06 15:47:46 crc kubenswrapper[5003]: E1206 15:47:46.460047 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"09d876ccd35d588ff0624dfac7651d4d5068562e6134af51f1d507fc4604b291\": container with ID starting with 09d876ccd35d588ff0624dfac7651d4d5068562e6134af51f1d507fc4604b291 not found: ID does not exist" containerID="09d876ccd35d588ff0624dfac7651d4d5068562e6134af51f1d507fc4604b291" Dec 06 15:47:46 crc kubenswrapper[5003]: I1206 15:47:46.460132 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09d876ccd35d588ff0624dfac7651d4d5068562e6134af51f1d507fc4604b291"} err="failed to get container status \"09d876ccd35d588ff0624dfac7651d4d5068562e6134af51f1d507fc4604b291\": rpc error: code = NotFound desc = could not find 
container \"09d876ccd35d588ff0624dfac7651d4d5068562e6134af51f1d507fc4604b291\": container with ID starting with 09d876ccd35d588ff0624dfac7651d4d5068562e6134af51f1d507fc4604b291 not found: ID does not exist" Dec 06 15:47:46 crc kubenswrapper[5003]: I1206 15:47:46.460167 5003 scope.go:117] "RemoveContainer" containerID="04e8e2c469bbb5cc2f0f29b0e16ad91c9f374c4ecf4db668bc12599bcc0c0f26" Dec 06 15:47:46 crc kubenswrapper[5003]: E1206 15:47:46.460563 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04e8e2c469bbb5cc2f0f29b0e16ad91c9f374c4ecf4db668bc12599bcc0c0f26\": container with ID starting with 04e8e2c469bbb5cc2f0f29b0e16ad91c9f374c4ecf4db668bc12599bcc0c0f26 not found: ID does not exist" containerID="04e8e2c469bbb5cc2f0f29b0e16ad91c9f374c4ecf4db668bc12599bcc0c0f26" Dec 06 15:47:46 crc kubenswrapper[5003]: I1206 15:47:46.460624 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04e8e2c469bbb5cc2f0f29b0e16ad91c9f374c4ecf4db668bc12599bcc0c0f26"} err="failed to get container status \"04e8e2c469bbb5cc2f0f29b0e16ad91c9f374c4ecf4db668bc12599bcc0c0f26\": rpc error: code = NotFound desc = could not find container \"04e8e2c469bbb5cc2f0f29b0e16ad91c9f374c4ecf4db668bc12599bcc0c0f26\": container with ID starting with 04e8e2c469bbb5cc2f0f29b0e16ad91c9f374c4ecf4db668bc12599bcc0c0f26 not found: ID does not exist" Dec 06 15:47:47 crc kubenswrapper[5003]: I1206 15:47:47.722610 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2e79cb9-e11e-43fd-be96-8b35bcff5b83" path="/var/lib/kubelet/pods/c2e79cb9-e11e-43fd-be96-8b35bcff5b83/volumes" Dec 06 15:47:48 crc kubenswrapper[5003]: I1206 15:47:48.572639 5003 patch_prober.go:28] interesting pod/machine-config-daemon-w25db container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 15:47:48 crc kubenswrapper[5003]: I1206 15:47:48.572720 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 15:47:49 crc kubenswrapper[5003]: I1206 15:47:49.908088 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/e9e5855e5cd3b19ff946d1a783ccd6861442182df01f7778e40ab7fce76mmm5"] Dec 06 15:47:49 crc kubenswrapper[5003]: E1206 15:47:49.908659 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="167f8562-1584-47b3-9f2f-3a3f8239bbe5" containerName="extract-content" Dec 06 15:47:49 crc kubenswrapper[5003]: I1206 15:47:49.908673 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="167f8562-1584-47b3-9f2f-3a3f8239bbe5" containerName="extract-content" Dec 06 15:47:49 crc kubenswrapper[5003]: E1206 15:47:49.908686 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="167f8562-1584-47b3-9f2f-3a3f8239bbe5" containerName="registry-server" Dec 06 15:47:49 crc kubenswrapper[5003]: I1206 15:47:49.908692 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="167f8562-1584-47b3-9f2f-3a3f8239bbe5" containerName="registry-server" Dec 06 15:47:49 crc kubenswrapper[5003]: E1206 15:47:49.908701 5003 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="167f8562-1584-47b3-9f2f-3a3f8239bbe5" containerName="extract-utilities" Dec 06 15:47:49 crc kubenswrapper[5003]: I1206 15:47:49.908707 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="167f8562-1584-47b3-9f2f-3a3f8239bbe5" containerName="extract-utilities" Dec 06 15:47:49 crc kubenswrapper[5003]: E1206 15:47:49.908719 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2e79cb9-e11e-43fd-be96-8b35bcff5b83" containerName="registry-server" Dec 06 15:47:49 crc kubenswrapper[5003]: I1206 15:47:49.908724 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2e79cb9-e11e-43fd-be96-8b35bcff5b83" containerName="registry-server" Dec 06 15:47:49 crc kubenswrapper[5003]: E1206 15:47:49.908733 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2e79cb9-e11e-43fd-be96-8b35bcff5b83" containerName="extract-utilities" Dec 06 15:47:49 crc kubenswrapper[5003]: I1206 15:47:49.908738 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2e79cb9-e11e-43fd-be96-8b35bcff5b83" containerName="extract-utilities" Dec 06 15:47:49 crc kubenswrapper[5003]: E1206 15:47:49.908756 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2e79cb9-e11e-43fd-be96-8b35bcff5b83" containerName="extract-content" Dec 06 15:47:49 crc kubenswrapper[5003]: I1206 15:47:49.908762 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2e79cb9-e11e-43fd-be96-8b35bcff5b83" containerName="extract-content" Dec 06 15:47:49 crc kubenswrapper[5003]: I1206 15:47:49.908909 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2e79cb9-e11e-43fd-be96-8b35bcff5b83" containerName="registry-server" Dec 06 15:47:49 crc kubenswrapper[5003]: I1206 15:47:49.908929 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="167f8562-1584-47b3-9f2f-3a3f8239bbe5" containerName="registry-server" Dec 06 15:47:49 crc kubenswrapper[5003]: I1206 15:47:49.909902 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/e9e5855e5cd3b19ff946d1a783ccd6861442182df01f7778e40ab7fce76mmm5" Dec 06 15:47:49 crc kubenswrapper[5003]: I1206 15:47:49.912454 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-kqjk5" Dec 06 15:47:49 crc kubenswrapper[5003]: I1206 15:47:49.924558 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/e9e5855e5cd3b19ff946d1a783ccd6861442182df01f7778e40ab7fce76mmm5"] Dec 06 15:47:50 crc kubenswrapper[5003]: I1206 15:47:50.043960 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qwrgq\" (UniqueName: \"kubernetes.io/projected/6711a352-d8f2-4191-b0a5-f4c68b72d443-kube-api-access-qwrgq\") pod \"e9e5855e5cd3b19ff946d1a783ccd6861442182df01f7778e40ab7fce76mmm5\" (UID: \"6711a352-d8f2-4191-b0a5-f4c68b72d443\") " pod="openstack-operators/e9e5855e5cd3b19ff946d1a783ccd6861442182df01f7778e40ab7fce76mmm5" Dec 06 15:47:50 crc kubenswrapper[5003]: I1206 15:47:50.044021 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6711a352-d8f2-4191-b0a5-f4c68b72d443-util\") pod \"e9e5855e5cd3b19ff946d1a783ccd6861442182df01f7778e40ab7fce76mmm5\" (UID: \"6711a352-d8f2-4191-b0a5-f4c68b72d443\") " pod="openstack-operators/e9e5855e5cd3b19ff946d1a783ccd6861442182df01f7778e40ab7fce76mmm5" Dec 06 15:47:50 crc kubenswrapper[5003]: I1206 15:47:50.044069 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6711a352-d8f2-4191-b0a5-f4c68b72d443-bundle\") pod \"e9e5855e5cd3b19ff946d1a783ccd6861442182df01f7778e40ab7fce76mmm5\" (UID: \"6711a352-d8f2-4191-b0a5-f4c68b72d443\") " pod="openstack-operators/e9e5855e5cd3b19ff946d1a783ccd6861442182df01f7778e40ab7fce76mmm5" Dec 06 15:47:50 crc kubenswrapper[5003]: I1206 15:47:50.145167 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qwrgq\" (UniqueName: \"kubernetes.io/projected/6711a352-d8f2-4191-b0a5-f4c68b72d443-kube-api-access-qwrgq\") pod \"e9e5855e5cd3b19ff946d1a783ccd6861442182df01f7778e40ab7fce76mmm5\" (UID: \"6711a352-d8f2-4191-b0a5-f4c68b72d443\") " pod="openstack-operators/e9e5855e5cd3b19ff946d1a783ccd6861442182df01f7778e40ab7fce76mmm5" Dec 06 15:47:50 crc kubenswrapper[5003]: I1206 15:47:50.145245 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6711a352-d8f2-4191-b0a5-f4c68b72d443-util\") pod \"e9e5855e5cd3b19ff946d1a783ccd6861442182df01f7778e40ab7fce76mmm5\" (UID: \"6711a352-d8f2-4191-b0a5-f4c68b72d443\") " pod="openstack-operators/e9e5855e5cd3b19ff946d1a783ccd6861442182df01f7778e40ab7fce76mmm5" Dec 06 15:47:50 crc kubenswrapper[5003]: I1206 15:47:50.145302 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6711a352-d8f2-4191-b0a5-f4c68b72d443-bundle\") pod \"e9e5855e5cd3b19ff946d1a783ccd6861442182df01f7778e40ab7fce76mmm5\" (UID: \"6711a352-d8f2-4191-b0a5-f4c68b72d443\") " pod="openstack-operators/e9e5855e5cd3b19ff946d1a783ccd6861442182df01f7778e40ab7fce76mmm5" Dec 06 15:47:50 crc kubenswrapper[5003]: I1206 15:47:50.145835 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/6711a352-d8f2-4191-b0a5-f4c68b72d443-bundle\") pod \"e9e5855e5cd3b19ff946d1a783ccd6861442182df01f7778e40ab7fce76mmm5\" (UID: \"6711a352-d8f2-4191-b0a5-f4c68b72d443\") " pod="openstack-operators/e9e5855e5cd3b19ff946d1a783ccd6861442182df01f7778e40ab7fce76mmm5" Dec 06 15:47:50 crc kubenswrapper[5003]: I1206 15:47:50.145971 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6711a352-d8f2-4191-b0a5-f4c68b72d443-util\") pod \"e9e5855e5cd3b19ff946d1a783ccd6861442182df01f7778e40ab7fce76mmm5\" (UID: \"6711a352-d8f2-4191-b0a5-f4c68b72d443\") " pod="openstack-operators/e9e5855e5cd3b19ff946d1a783ccd6861442182df01f7778e40ab7fce76mmm5" Dec 06 15:47:50 crc kubenswrapper[5003]: I1206 15:47:50.164215 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qwrgq\" (UniqueName: \"kubernetes.io/projected/6711a352-d8f2-4191-b0a5-f4c68b72d443-kube-api-access-qwrgq\") pod \"e9e5855e5cd3b19ff946d1a783ccd6861442182df01f7778e40ab7fce76mmm5\" (UID: \"6711a352-d8f2-4191-b0a5-f4c68b72d443\") " pod="openstack-operators/e9e5855e5cd3b19ff946d1a783ccd6861442182df01f7778e40ab7fce76mmm5" Dec 06 15:47:50 crc kubenswrapper[5003]: I1206 15:47:50.236822 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/e9e5855e5cd3b19ff946d1a783ccd6861442182df01f7778e40ab7fce76mmm5" Dec 06 15:47:50 crc kubenswrapper[5003]: I1206 15:47:50.693231 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/e9e5855e5cd3b19ff946d1a783ccd6861442182df01f7778e40ab7fce76mmm5"] Dec 06 15:47:50 crc kubenswrapper[5003]: W1206 15:47:50.697378 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6711a352_d8f2_4191_b0a5_f4c68b72d443.slice/crio-92cf7190197b998297a8bfd5e2fde641eb56f8824a6ef4da68fc6c5b6d1ef2c4 WatchSource:0}: Error finding container 92cf7190197b998297a8bfd5e2fde641eb56f8824a6ef4da68fc6c5b6d1ef2c4: Status 404 returned error can't find the container with id 92cf7190197b998297a8bfd5e2fde641eb56f8824a6ef4da68fc6c5b6d1ef2c4 Dec 06 15:47:51 crc kubenswrapper[5003]: I1206 15:47:51.410778 5003 generic.go:334] "Generic (PLEG): container finished" podID="6711a352-d8f2-4191-b0a5-f4c68b72d443" containerID="4aecbed679ade5396ff7932cc4a4f0e4badd8ac34230cdb84d59c462180240cf" exitCode=0 Dec 06 15:47:51 crc kubenswrapper[5003]: I1206 15:47:51.410882 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e9e5855e5cd3b19ff946d1a783ccd6861442182df01f7778e40ab7fce76mmm5" event={"ID":"6711a352-d8f2-4191-b0a5-f4c68b72d443","Type":"ContainerDied","Data":"4aecbed679ade5396ff7932cc4a4f0e4badd8ac34230cdb84d59c462180240cf"} Dec 06 15:47:51 crc kubenswrapper[5003]: I1206 15:47:51.411126 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e9e5855e5cd3b19ff946d1a783ccd6861442182df01f7778e40ab7fce76mmm5" event={"ID":"6711a352-d8f2-4191-b0a5-f4c68b72d443","Type":"ContainerStarted","Data":"92cf7190197b998297a8bfd5e2fde641eb56f8824a6ef4da68fc6c5b6d1ef2c4"} Dec 06 15:47:52 crc kubenswrapper[5003]: I1206 15:47:52.417738 5003 generic.go:334] "Generic (PLEG): container finished" podID="6711a352-d8f2-4191-b0a5-f4c68b72d443" containerID="c5d84a296a123c4f522a641bc63dd9e6225843feac9cb7f0db3b50294136766d" exitCode=0 Dec 06 15:47:52 crc kubenswrapper[5003]: I1206 15:47:52.417784 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/e9e5855e5cd3b19ff946d1a783ccd6861442182df01f7778e40ab7fce76mmm5" event={"ID":"6711a352-d8f2-4191-b0a5-f4c68b72d443","Type":"ContainerDied","Data":"c5d84a296a123c4f522a641bc63dd9e6225843feac9cb7f0db3b50294136766d"} Dec 06 15:47:53 crc kubenswrapper[5003]: I1206 15:47:53.427725 5003 generic.go:334] "Generic (PLEG): container finished" podID="6711a352-d8f2-4191-b0a5-f4c68b72d443" containerID="6e89417ac8b7e14169c40458d7c1371e81f5caf0850d9f26625e9facb8a4afdc" exitCode=0 Dec 06 15:47:53 crc kubenswrapper[5003]: I1206 15:47:53.427862 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e9e5855e5cd3b19ff946d1a783ccd6861442182df01f7778e40ab7fce76mmm5" event={"ID":"6711a352-d8f2-4191-b0a5-f4c68b72d443","Type":"ContainerDied","Data":"6e89417ac8b7e14169c40458d7c1371e81f5caf0850d9f26625e9facb8a4afdc"} Dec 06 15:47:54 crc kubenswrapper[5003]: I1206 15:47:54.658827 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/e9e5855e5cd3b19ff946d1a783ccd6861442182df01f7778e40ab7fce76mmm5" Dec 06 15:47:54 crc kubenswrapper[5003]: I1206 15:47:54.717848 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qwrgq\" (UniqueName: \"kubernetes.io/projected/6711a352-d8f2-4191-b0a5-f4c68b72d443-kube-api-access-qwrgq\") pod \"6711a352-d8f2-4191-b0a5-f4c68b72d443\" (UID: \"6711a352-d8f2-4191-b0a5-f4c68b72d443\") " Dec 06 15:47:54 crc kubenswrapper[5003]: I1206 15:47:54.717971 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6711a352-d8f2-4191-b0a5-f4c68b72d443-util\") pod \"6711a352-d8f2-4191-b0a5-f4c68b72d443\" (UID: \"6711a352-d8f2-4191-b0a5-f4c68b72d443\") " Dec 06 15:47:54 crc kubenswrapper[5003]: I1206 15:47:54.718010 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6711a352-d8f2-4191-b0a5-f4c68b72d443-bundle\") pod \"6711a352-d8f2-4191-b0a5-f4c68b72d443\" (UID: \"6711a352-d8f2-4191-b0a5-f4c68b72d443\") " Dec 06 15:47:54 crc kubenswrapper[5003]: I1206 15:47:54.720906 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6711a352-d8f2-4191-b0a5-f4c68b72d443-bundle" (OuterVolumeSpecName: "bundle") pod "6711a352-d8f2-4191-b0a5-f4c68b72d443" (UID: "6711a352-d8f2-4191-b0a5-f4c68b72d443"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:47:54 crc kubenswrapper[5003]: I1206 15:47:54.725849 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6711a352-d8f2-4191-b0a5-f4c68b72d443-kube-api-access-qwrgq" (OuterVolumeSpecName: "kube-api-access-qwrgq") pod "6711a352-d8f2-4191-b0a5-f4c68b72d443" (UID: "6711a352-d8f2-4191-b0a5-f4c68b72d443"). InnerVolumeSpecName "kube-api-access-qwrgq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:47:54 crc kubenswrapper[5003]: I1206 15:47:54.736344 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6711a352-d8f2-4191-b0a5-f4c68b72d443-util" (OuterVolumeSpecName: "util") pod "6711a352-d8f2-4191-b0a5-f4c68b72d443" (UID: "6711a352-d8f2-4191-b0a5-f4c68b72d443"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:47:54 crc kubenswrapper[5003]: I1206 15:47:54.819551 5003 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6711a352-d8f2-4191-b0a5-f4c68b72d443-util\") on node \"crc\" DevicePath \"\"" Dec 06 15:47:54 crc kubenswrapper[5003]: I1206 15:47:54.819770 5003 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6711a352-d8f2-4191-b0a5-f4c68b72d443-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 15:47:54 crc kubenswrapper[5003]: I1206 15:47:54.819779 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qwrgq\" (UniqueName: \"kubernetes.io/projected/6711a352-d8f2-4191-b0a5-f4c68b72d443-kube-api-access-qwrgq\") on node \"crc\" DevicePath \"\"" Dec 06 15:47:55 crc kubenswrapper[5003]: I1206 15:47:55.441840 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e9e5855e5cd3b19ff946d1a783ccd6861442182df01f7778e40ab7fce76mmm5" event={"ID":"6711a352-d8f2-4191-b0a5-f4c68b72d443","Type":"ContainerDied","Data":"92cf7190197b998297a8bfd5e2fde641eb56f8824a6ef4da68fc6c5b6d1ef2c4"} Dec 06 15:47:55 crc kubenswrapper[5003]: I1206 15:47:55.441898 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="92cf7190197b998297a8bfd5e2fde641eb56f8824a6ef4da68fc6c5b6d1ef2c4" Dec 06 15:47:55 crc kubenswrapper[5003]: I1206 15:47:55.442251 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/e9e5855e5cd3b19ff946d1a783ccd6861442182df01f7778e40ab7fce76mmm5" Dec 06 15:47:56 crc kubenswrapper[5003]: I1206 15:47:56.271549 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-jbzsr"] Dec 06 15:47:56 crc kubenswrapper[5003]: E1206 15:47:56.272005 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6711a352-d8f2-4191-b0a5-f4c68b72d443" containerName="extract" Dec 06 15:47:56 crc kubenswrapper[5003]: I1206 15:47:56.272028 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="6711a352-d8f2-4191-b0a5-f4c68b72d443" containerName="extract" Dec 06 15:47:56 crc kubenswrapper[5003]: E1206 15:47:56.272047 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6711a352-d8f2-4191-b0a5-f4c68b72d443" containerName="util" Dec 06 15:47:56 crc kubenswrapper[5003]: I1206 15:47:56.272060 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="6711a352-d8f2-4191-b0a5-f4c68b72d443" containerName="util" Dec 06 15:47:56 crc kubenswrapper[5003]: E1206 15:47:56.272096 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6711a352-d8f2-4191-b0a5-f4c68b72d443" containerName="pull" Dec 06 15:47:56 crc kubenswrapper[5003]: I1206 15:47:56.272110 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="6711a352-d8f2-4191-b0a5-f4c68b72d443" containerName="pull" Dec 06 15:47:56 crc kubenswrapper[5003]: I1206 15:47:56.272362 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="6711a352-d8f2-4191-b0a5-f4c68b72d443" containerName="extract" Dec 06 15:47:56 crc kubenswrapper[5003]: I1206 15:47:56.274144 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jbzsr" Dec 06 15:47:56 crc kubenswrapper[5003]: I1206 15:47:56.283955 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jbzsr"] Dec 06 15:47:56 crc kubenswrapper[5003]: I1206 15:47:56.341009 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kbkjs\" (UniqueName: \"kubernetes.io/projected/e257780d-b2bd-44c0-8238-898a53ae4cb3-kube-api-access-kbkjs\") pod \"community-operators-jbzsr\" (UID: \"e257780d-b2bd-44c0-8238-898a53ae4cb3\") " pod="openshift-marketplace/community-operators-jbzsr" Dec 06 15:47:56 crc kubenswrapper[5003]: I1206 15:47:56.341101 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e257780d-b2bd-44c0-8238-898a53ae4cb3-utilities\") pod \"community-operators-jbzsr\" (UID: \"e257780d-b2bd-44c0-8238-898a53ae4cb3\") " pod="openshift-marketplace/community-operators-jbzsr" Dec 06 15:47:56 crc kubenswrapper[5003]: I1206 15:47:56.341531 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e257780d-b2bd-44c0-8238-898a53ae4cb3-catalog-content\") pod \"community-operators-jbzsr\" (UID: \"e257780d-b2bd-44c0-8238-898a53ae4cb3\") " pod="openshift-marketplace/community-operators-jbzsr" Dec 06 15:47:56 crc kubenswrapper[5003]: I1206 15:47:56.442508 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e257780d-b2bd-44c0-8238-898a53ae4cb3-catalog-content\") pod \"community-operators-jbzsr\" (UID: \"e257780d-b2bd-44c0-8238-898a53ae4cb3\") " pod="openshift-marketplace/community-operators-jbzsr" Dec 06 15:47:56 crc kubenswrapper[5003]: I1206 15:47:56.442575 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kbkjs\" (UniqueName: \"kubernetes.io/projected/e257780d-b2bd-44c0-8238-898a53ae4cb3-kube-api-access-kbkjs\") pod \"community-operators-jbzsr\" (UID: \"e257780d-b2bd-44c0-8238-898a53ae4cb3\") " pod="openshift-marketplace/community-operators-jbzsr" Dec 06 15:47:56 crc kubenswrapper[5003]: I1206 15:47:56.442603 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e257780d-b2bd-44c0-8238-898a53ae4cb3-utilities\") pod \"community-operators-jbzsr\" (UID: \"e257780d-b2bd-44c0-8238-898a53ae4cb3\") " pod="openshift-marketplace/community-operators-jbzsr" Dec 06 15:47:56 crc kubenswrapper[5003]: I1206 15:47:56.443325 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e257780d-b2bd-44c0-8238-898a53ae4cb3-utilities\") pod \"community-operators-jbzsr\" (UID: \"e257780d-b2bd-44c0-8238-898a53ae4cb3\") " pod="openshift-marketplace/community-operators-jbzsr" Dec 06 15:47:56 crc kubenswrapper[5003]: I1206 15:47:56.443323 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e257780d-b2bd-44c0-8238-898a53ae4cb3-catalog-content\") pod \"community-operators-jbzsr\" (UID: \"e257780d-b2bd-44c0-8238-898a53ae4cb3\") " pod="openshift-marketplace/community-operators-jbzsr" Dec 06 15:47:56 crc kubenswrapper[5003]: I1206 15:47:56.460993 5003 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-kbkjs\" (UniqueName: \"kubernetes.io/projected/e257780d-b2bd-44c0-8238-898a53ae4cb3-kube-api-access-kbkjs\") pod \"community-operators-jbzsr\" (UID: \"e257780d-b2bd-44c0-8238-898a53ae4cb3\") " pod="openshift-marketplace/community-operators-jbzsr" Dec 06 15:47:56 crc kubenswrapper[5003]: I1206 15:47:56.603387 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jbzsr" Dec 06 15:47:57 crc kubenswrapper[5003]: I1206 15:47:57.084165 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jbzsr"] Dec 06 15:47:57 crc kubenswrapper[5003]: W1206 15:47:57.097064 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode257780d_b2bd_44c0_8238_898a53ae4cb3.slice/crio-5d762f35e5f3e24d648ae5f81ac3b4b4c49d4129a5d029fccbc86581f9b02862 WatchSource:0}: Error finding container 5d762f35e5f3e24d648ae5f81ac3b4b4c49d4129a5d029fccbc86581f9b02862: Status 404 returned error can't find the container with id 5d762f35e5f3e24d648ae5f81ac3b4b4c49d4129a5d029fccbc86581f9b02862 Dec 06 15:47:57 crc kubenswrapper[5003]: I1206 15:47:57.455542 5003 generic.go:334] "Generic (PLEG): container finished" podID="e257780d-b2bd-44c0-8238-898a53ae4cb3" containerID="df8f4b467c3b88a76ec1a68505251850e64dbd91b9546a738fd33dce5d1c7435" exitCode=0 Dec 06 15:47:57 crc kubenswrapper[5003]: I1206 15:47:57.455585 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jbzsr" event={"ID":"e257780d-b2bd-44c0-8238-898a53ae4cb3","Type":"ContainerDied","Data":"df8f4b467c3b88a76ec1a68505251850e64dbd91b9546a738fd33dce5d1c7435"} Dec 06 15:47:57 crc kubenswrapper[5003]: I1206 15:47:57.455608 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jbzsr" event={"ID":"e257780d-b2bd-44c0-8238-898a53ae4cb3","Type":"ContainerStarted","Data":"5d762f35e5f3e24d648ae5f81ac3b4b4c49d4129a5d029fccbc86581f9b02862"} Dec 06 15:47:58 crc kubenswrapper[5003]: I1206 15:47:58.462935 5003 generic.go:334] "Generic (PLEG): container finished" podID="e257780d-b2bd-44c0-8238-898a53ae4cb3" containerID="442876e36d919d279a7c39272afe7230173643c1f06ba1f86b84922fa033f1bb" exitCode=0 Dec 06 15:47:58 crc kubenswrapper[5003]: I1206 15:47:58.463035 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jbzsr" event={"ID":"e257780d-b2bd-44c0-8238-898a53ae4cb3","Type":"ContainerDied","Data":"442876e36d919d279a7c39272afe7230173643c1f06ba1f86b84922fa033f1bb"} Dec 06 15:47:59 crc kubenswrapper[5003]: I1206 15:47:59.469753 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jbzsr" event={"ID":"e257780d-b2bd-44c0-8238-898a53ae4cb3","Type":"ContainerStarted","Data":"4bfc334444437590f68151e50877adc614ff6b0011ffa7a56892cb4cd29ee25b"} Dec 06 15:47:59 crc kubenswrapper[5003]: I1206 15:47:59.487834 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-jbzsr" podStartSLOduration=1.998287581 podStartE2EDuration="3.487814265s" podCreationTimestamp="2025-12-06 15:47:56 +0000 UTC" firstStartedPulling="2025-12-06 15:47:57.456786947 +0000 UTC m=+955.990141328" lastFinishedPulling="2025-12-06 15:47:58.946313631 +0000 UTC m=+957.479668012" observedRunningTime="2025-12-06 15:47:59.484845763 +0000 UTC 
m=+958.018200164" watchObservedRunningTime="2025-12-06 15:47:59.487814265 +0000 UTC m=+958.021168646" Dec 06 15:48:00 crc kubenswrapper[5003]: I1206 15:48:00.246769 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-6fd8f69c54-5f56z"] Dec 06 15:48:00 crc kubenswrapper[5003]: I1206 15:48:00.248014 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-6fd8f69c54-5f56z" Dec 06 15:48:00 crc kubenswrapper[5003]: I1206 15:48:00.249833 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-b9gff" Dec 06 15:48:00 crc kubenswrapper[5003]: I1206 15:48:00.250474 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 06 15:48:00 crc kubenswrapper[5003]: I1206 15:48:00.252824 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-service-cert" Dec 06 15:48:00 crc kubenswrapper[5003]: I1206 15:48:00.271931 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-6fd8f69c54-5f56z"] Dec 06 15:48:00 crc kubenswrapper[5003]: I1206 15:48:00.297546 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5458q\" (UniqueName: \"kubernetes.io/projected/73f33e10-66ff-41e9-97a3-e8cd2db5e39e-kube-api-access-5458q\") pod \"mariadb-operator-controller-manager-6fd8f69c54-5f56z\" (UID: \"73f33e10-66ff-41e9-97a3-e8cd2db5e39e\") " pod="openstack-operators/mariadb-operator-controller-manager-6fd8f69c54-5f56z" Dec 06 15:48:00 crc kubenswrapper[5003]: I1206 15:48:00.297647 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/73f33e10-66ff-41e9-97a3-e8cd2db5e39e-apiservice-cert\") pod \"mariadb-operator-controller-manager-6fd8f69c54-5f56z\" (UID: \"73f33e10-66ff-41e9-97a3-e8cd2db5e39e\") " pod="openstack-operators/mariadb-operator-controller-manager-6fd8f69c54-5f56z" Dec 06 15:48:00 crc kubenswrapper[5003]: I1206 15:48:00.297686 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/73f33e10-66ff-41e9-97a3-e8cd2db5e39e-webhook-cert\") pod \"mariadb-operator-controller-manager-6fd8f69c54-5f56z\" (UID: \"73f33e10-66ff-41e9-97a3-e8cd2db5e39e\") " pod="openstack-operators/mariadb-operator-controller-manager-6fd8f69c54-5f56z" Dec 06 15:48:00 crc kubenswrapper[5003]: I1206 15:48:00.398557 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5458q\" (UniqueName: \"kubernetes.io/projected/73f33e10-66ff-41e9-97a3-e8cd2db5e39e-kube-api-access-5458q\") pod \"mariadb-operator-controller-manager-6fd8f69c54-5f56z\" (UID: \"73f33e10-66ff-41e9-97a3-e8cd2db5e39e\") " pod="openstack-operators/mariadb-operator-controller-manager-6fd8f69c54-5f56z" Dec 06 15:48:00 crc kubenswrapper[5003]: I1206 15:48:00.398668 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/73f33e10-66ff-41e9-97a3-e8cd2db5e39e-apiservice-cert\") pod \"mariadb-operator-controller-manager-6fd8f69c54-5f56z\" (UID: \"73f33e10-66ff-41e9-97a3-e8cd2db5e39e\") " 
pod="openstack-operators/mariadb-operator-controller-manager-6fd8f69c54-5f56z" Dec 06 15:48:00 crc kubenswrapper[5003]: I1206 15:48:00.398709 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/73f33e10-66ff-41e9-97a3-e8cd2db5e39e-webhook-cert\") pod \"mariadb-operator-controller-manager-6fd8f69c54-5f56z\" (UID: \"73f33e10-66ff-41e9-97a3-e8cd2db5e39e\") " pod="openstack-operators/mariadb-operator-controller-manager-6fd8f69c54-5f56z" Dec 06 15:48:00 crc kubenswrapper[5003]: I1206 15:48:00.404718 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/73f33e10-66ff-41e9-97a3-e8cd2db5e39e-apiservice-cert\") pod \"mariadb-operator-controller-manager-6fd8f69c54-5f56z\" (UID: \"73f33e10-66ff-41e9-97a3-e8cd2db5e39e\") " pod="openstack-operators/mariadb-operator-controller-manager-6fd8f69c54-5f56z" Dec 06 15:48:00 crc kubenswrapper[5003]: I1206 15:48:00.408089 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/73f33e10-66ff-41e9-97a3-e8cd2db5e39e-webhook-cert\") pod \"mariadb-operator-controller-manager-6fd8f69c54-5f56z\" (UID: \"73f33e10-66ff-41e9-97a3-e8cd2db5e39e\") " pod="openstack-operators/mariadb-operator-controller-manager-6fd8f69c54-5f56z" Dec 06 15:48:00 crc kubenswrapper[5003]: I1206 15:48:00.423241 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5458q\" (UniqueName: \"kubernetes.io/projected/73f33e10-66ff-41e9-97a3-e8cd2db5e39e-kube-api-access-5458q\") pod \"mariadb-operator-controller-manager-6fd8f69c54-5f56z\" (UID: \"73f33e10-66ff-41e9-97a3-e8cd2db5e39e\") " pod="openstack-operators/mariadb-operator-controller-manager-6fd8f69c54-5f56z" Dec 06 15:48:00 crc kubenswrapper[5003]: I1206 15:48:00.566737 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-6fd8f69c54-5f56z" Dec 06 15:48:00 crc kubenswrapper[5003]: I1206 15:48:00.800849 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-6fd8f69c54-5f56z"] Dec 06 15:48:00 crc kubenswrapper[5003]: W1206 15:48:00.806467 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod73f33e10_66ff_41e9_97a3_e8cd2db5e39e.slice/crio-eae07f18256d7c09707b55224700c6c97b5b011a21e4440d8470aa2c7f299302 WatchSource:0}: Error finding container eae07f18256d7c09707b55224700c6c97b5b011a21e4440d8470aa2c7f299302: Status 404 returned error can't find the container with id eae07f18256d7c09707b55224700c6c97b5b011a21e4440d8470aa2c7f299302 Dec 06 15:48:01 crc kubenswrapper[5003]: I1206 15:48:01.492476 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-6fd8f69c54-5f56z" event={"ID":"73f33e10-66ff-41e9-97a3-e8cd2db5e39e","Type":"ContainerStarted","Data":"eae07f18256d7c09707b55224700c6c97b5b011a21e4440d8470aa2c7f299302"} Dec 06 15:48:05 crc kubenswrapper[5003]: I1206 15:48:05.514887 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-6fd8f69c54-5f56z" event={"ID":"73f33e10-66ff-41e9-97a3-e8cd2db5e39e","Type":"ContainerStarted","Data":"be04d6be433878f1997040b2c461c22a91ecd811282e70b7ae1e7fb706d37f64"} Dec 06 15:48:05 crc kubenswrapper[5003]: I1206 15:48:05.515460 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-6fd8f69c54-5f56z" Dec 06 15:48:05 crc kubenswrapper[5003]: I1206 15:48:05.553399 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-6fd8f69c54-5f56z" podStartSLOduration=1.938697055 podStartE2EDuration="5.55337604s" podCreationTimestamp="2025-12-06 15:48:00 +0000 UTC" firstStartedPulling="2025-12-06 15:48:00.808131185 +0000 UTC m=+959.341485566" lastFinishedPulling="2025-12-06 15:48:04.42281017 +0000 UTC m=+962.956164551" observedRunningTime="2025-12-06 15:48:05.550125949 +0000 UTC m=+964.083480350" watchObservedRunningTime="2025-12-06 15:48:05.55337604 +0000 UTC m=+964.086730431" Dec 06 15:48:06 crc kubenswrapper[5003]: I1206 15:48:06.604194 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-jbzsr" Dec 06 15:48:06 crc kubenswrapper[5003]: I1206 15:48:06.604240 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-jbzsr" Dec 06 15:48:06 crc kubenswrapper[5003]: I1206 15:48:06.661473 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-jbzsr" Dec 06 15:48:07 crc kubenswrapper[5003]: I1206 15:48:07.579030 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-jbzsr" Dec 06 15:48:08 crc kubenswrapper[5003]: I1206 15:48:08.054621 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jbzsr"] Dec 06 15:48:09 crc kubenswrapper[5003]: I1206 15:48:09.539739 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-jbzsr" 
podUID="e257780d-b2bd-44c0-8238-898a53ae4cb3" containerName="registry-server" containerID="cri-o://4bfc334444437590f68151e50877adc614ff6b0011ffa7a56892cb4cd29ee25b" gracePeriod=2 Dec 06 15:48:10 crc kubenswrapper[5003]: I1206 15:48:10.548806 5003 generic.go:334] "Generic (PLEG): container finished" podID="e257780d-b2bd-44c0-8238-898a53ae4cb3" containerID="4bfc334444437590f68151e50877adc614ff6b0011ffa7a56892cb4cd29ee25b" exitCode=0 Dec 06 15:48:10 crc kubenswrapper[5003]: I1206 15:48:10.548864 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jbzsr" event={"ID":"e257780d-b2bd-44c0-8238-898a53ae4cb3","Type":"ContainerDied","Data":"4bfc334444437590f68151e50877adc614ff6b0011ffa7a56892cb4cd29ee25b"} Dec 06 15:48:10 crc kubenswrapper[5003]: I1206 15:48:10.573295 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-6fd8f69c54-5f56z" Dec 06 15:48:11 crc kubenswrapper[5003]: I1206 15:48:11.061057 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jbzsr" Dec 06 15:48:11 crc kubenswrapper[5003]: I1206 15:48:11.254644 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kbkjs\" (UniqueName: \"kubernetes.io/projected/e257780d-b2bd-44c0-8238-898a53ae4cb3-kube-api-access-kbkjs\") pod \"e257780d-b2bd-44c0-8238-898a53ae4cb3\" (UID: \"e257780d-b2bd-44c0-8238-898a53ae4cb3\") " Dec 06 15:48:11 crc kubenswrapper[5003]: I1206 15:48:11.254735 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e257780d-b2bd-44c0-8238-898a53ae4cb3-catalog-content\") pod \"e257780d-b2bd-44c0-8238-898a53ae4cb3\" (UID: \"e257780d-b2bd-44c0-8238-898a53ae4cb3\") " Dec 06 15:48:11 crc kubenswrapper[5003]: I1206 15:48:11.254836 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e257780d-b2bd-44c0-8238-898a53ae4cb3-utilities\") pod \"e257780d-b2bd-44c0-8238-898a53ae4cb3\" (UID: \"e257780d-b2bd-44c0-8238-898a53ae4cb3\") " Dec 06 15:48:11 crc kubenswrapper[5003]: I1206 15:48:11.255855 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e257780d-b2bd-44c0-8238-898a53ae4cb3-utilities" (OuterVolumeSpecName: "utilities") pod "e257780d-b2bd-44c0-8238-898a53ae4cb3" (UID: "e257780d-b2bd-44c0-8238-898a53ae4cb3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:48:11 crc kubenswrapper[5003]: I1206 15:48:11.266799 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e257780d-b2bd-44c0-8238-898a53ae4cb3-kube-api-access-kbkjs" (OuterVolumeSpecName: "kube-api-access-kbkjs") pod "e257780d-b2bd-44c0-8238-898a53ae4cb3" (UID: "e257780d-b2bd-44c0-8238-898a53ae4cb3"). InnerVolumeSpecName "kube-api-access-kbkjs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:48:11 crc kubenswrapper[5003]: I1206 15:48:11.301105 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e257780d-b2bd-44c0-8238-898a53ae4cb3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e257780d-b2bd-44c0-8238-898a53ae4cb3" (UID: "e257780d-b2bd-44c0-8238-898a53ae4cb3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:48:11 crc kubenswrapper[5003]: I1206 15:48:11.356849 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e257780d-b2bd-44c0-8238-898a53ae4cb3-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 15:48:11 crc kubenswrapper[5003]: I1206 15:48:11.356890 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kbkjs\" (UniqueName: \"kubernetes.io/projected/e257780d-b2bd-44c0-8238-898a53ae4cb3-kube-api-access-kbkjs\") on node \"crc\" DevicePath \"\"" Dec 06 15:48:11 crc kubenswrapper[5003]: I1206 15:48:11.356930 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e257780d-b2bd-44c0-8238-898a53ae4cb3-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 15:48:11 crc kubenswrapper[5003]: I1206 15:48:11.576393 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jbzsr" event={"ID":"e257780d-b2bd-44c0-8238-898a53ae4cb3","Type":"ContainerDied","Data":"5d762f35e5f3e24d648ae5f81ac3b4b4c49d4129a5d029fccbc86581f9b02862"} Dec 06 15:48:11 crc kubenswrapper[5003]: I1206 15:48:11.576561 5003 scope.go:117] "RemoveContainer" containerID="4bfc334444437590f68151e50877adc614ff6b0011ffa7a56892cb4cd29ee25b" Dec 06 15:48:11 crc kubenswrapper[5003]: I1206 15:48:11.576598 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jbzsr" Dec 06 15:48:11 crc kubenswrapper[5003]: I1206 15:48:11.620117 5003 scope.go:117] "RemoveContainer" containerID="442876e36d919d279a7c39272afe7230173643c1f06ba1f86b84922fa033f1bb" Dec 06 15:48:11 crc kubenswrapper[5003]: I1206 15:48:11.654671 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jbzsr"] Dec 06 15:48:11 crc kubenswrapper[5003]: I1206 15:48:11.657685 5003 scope.go:117] "RemoveContainer" containerID="df8f4b467c3b88a76ec1a68505251850e64dbd91b9546a738fd33dce5d1c7435" Dec 06 15:48:11 crc kubenswrapper[5003]: I1206 15:48:11.666765 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-jbzsr"] Dec 06 15:48:11 crc kubenswrapper[5003]: I1206 15:48:11.725183 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e257780d-b2bd-44c0-8238-898a53ae4cb3" path="/var/lib/kubelet/pods/e257780d-b2bd-44c0-8238-898a53ae4cb3/volumes" Dec 06 15:48:17 crc kubenswrapper[5003]: I1206 15:48:17.679268 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-index-2ht4h"] Dec 06 15:48:17 crc kubenswrapper[5003]: E1206 15:48:17.680133 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e257780d-b2bd-44c0-8238-898a53ae4cb3" containerName="registry-server" Dec 06 15:48:17 crc kubenswrapper[5003]: I1206 15:48:17.680149 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e257780d-b2bd-44c0-8238-898a53ae4cb3" containerName="registry-server" Dec 06 15:48:17 crc kubenswrapper[5003]: E1206 15:48:17.680171 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e257780d-b2bd-44c0-8238-898a53ae4cb3" containerName="extract-content" Dec 06 15:48:17 crc kubenswrapper[5003]: I1206 15:48:17.680182 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e257780d-b2bd-44c0-8238-898a53ae4cb3" containerName="extract-content" Dec 06 15:48:17 crc kubenswrapper[5003]: E1206 15:48:17.680194 5003 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e257780d-b2bd-44c0-8238-898a53ae4cb3" containerName="extract-utilities" Dec 06 15:48:17 crc kubenswrapper[5003]: I1206 15:48:17.680202 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e257780d-b2bd-44c0-8238-898a53ae4cb3" containerName="extract-utilities" Dec 06 15:48:17 crc kubenswrapper[5003]: I1206 15:48:17.680322 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e257780d-b2bd-44c0-8238-898a53ae4cb3" containerName="registry-server" Dec 06 15:48:17 crc kubenswrapper[5003]: I1206 15:48:17.680835 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-index-2ht4h" Dec 06 15:48:17 crc kubenswrapper[5003]: I1206 15:48:17.683335 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-index-dockercfg-bxtj2" Dec 06 15:48:17 crc kubenswrapper[5003]: I1206 15:48:17.693985 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-2ht4h"] Dec 06 15:48:17 crc kubenswrapper[5003]: I1206 15:48:17.859915 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4vp8\" (UniqueName: \"kubernetes.io/projected/aedbe2bf-a5c7-42dc-80b8-2e6fe7ee9c6c-kube-api-access-l4vp8\") pod \"infra-operator-index-2ht4h\" (UID: \"aedbe2bf-a5c7-42dc-80b8-2e6fe7ee9c6c\") " pod="openstack-operators/infra-operator-index-2ht4h" Dec 06 15:48:17 crc kubenswrapper[5003]: I1206 15:48:17.961443 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l4vp8\" (UniqueName: \"kubernetes.io/projected/aedbe2bf-a5c7-42dc-80b8-2e6fe7ee9c6c-kube-api-access-l4vp8\") pod \"infra-operator-index-2ht4h\" (UID: \"aedbe2bf-a5c7-42dc-80b8-2e6fe7ee9c6c\") " pod="openstack-operators/infra-operator-index-2ht4h" Dec 06 15:48:17 crc kubenswrapper[5003]: I1206 15:48:17.980075 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l4vp8\" (UniqueName: \"kubernetes.io/projected/aedbe2bf-a5c7-42dc-80b8-2e6fe7ee9c6c-kube-api-access-l4vp8\") pod \"infra-operator-index-2ht4h\" (UID: \"aedbe2bf-a5c7-42dc-80b8-2e6fe7ee9c6c\") " pod="openstack-operators/infra-operator-index-2ht4h" Dec 06 15:48:17 crc kubenswrapper[5003]: I1206 15:48:17.997905 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-2ht4h" Dec 06 15:48:18 crc kubenswrapper[5003]: I1206 15:48:18.296325 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-2ht4h"] Dec 06 15:48:18 crc kubenswrapper[5003]: I1206 15:48:18.572935 5003 patch_prober.go:28] interesting pod/machine-config-daemon-w25db container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 15:48:18 crc kubenswrapper[5003]: I1206 15:48:18.572999 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 15:48:18 crc kubenswrapper[5003]: I1206 15:48:18.573051 5003 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w25db" Dec 06 15:48:18 crc kubenswrapper[5003]: I1206 15:48:18.573583 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"bf4c0e939e0839bd8579c450bf673f46cb54e6312b28fa28edd3fa3c1fe6713b"} pod="openshift-machine-config-operator/machine-config-daemon-w25db" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 06 15:48:18 crc kubenswrapper[5003]: I1206 15:48:18.573679 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" containerName="machine-config-daemon" containerID="cri-o://bf4c0e939e0839bd8579c450bf673f46cb54e6312b28fa28edd3fa3c1fe6713b" gracePeriod=600 Dec 06 15:48:18 crc kubenswrapper[5003]: I1206 15:48:18.616523 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-2ht4h" event={"ID":"aedbe2bf-a5c7-42dc-80b8-2e6fe7ee9c6c","Type":"ContainerStarted","Data":"7380844aedd250e7265da4f3cdabc49e9495a0c6add87b61790bbb34b0c25938"} Dec 06 15:48:19 crc kubenswrapper[5003]: I1206 15:48:19.624799 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-2ht4h" event={"ID":"aedbe2bf-a5c7-42dc-80b8-2e6fe7ee9c6c","Type":"ContainerStarted","Data":"caeb0bee3885eb2551e9ac668861d8d908688d44cd4b75a62dc38ab441e0633c"} Dec 06 15:48:19 crc kubenswrapper[5003]: I1206 15:48:19.635370 5003 generic.go:334] "Generic (PLEG): container finished" podID="1a047c4d-003e-4668-9b96-945eab34ab68" containerID="bf4c0e939e0839bd8579c450bf673f46cb54e6312b28fa28edd3fa3c1fe6713b" exitCode=0 Dec 06 15:48:19 crc kubenswrapper[5003]: I1206 15:48:19.635425 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" event={"ID":"1a047c4d-003e-4668-9b96-945eab34ab68","Type":"ContainerDied","Data":"bf4c0e939e0839bd8579c450bf673f46cb54e6312b28fa28edd3fa3c1fe6713b"} Dec 06 15:48:19 crc kubenswrapper[5003]: I1206 15:48:19.635451 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" 
event={"ID":"1a047c4d-003e-4668-9b96-945eab34ab68","Type":"ContainerStarted","Data":"78b1e361c0889fb22d06542ab25b57331309a42111ebfeb58f0849e826b8ef88"} Dec 06 15:48:19 crc kubenswrapper[5003]: I1206 15:48:19.635466 5003 scope.go:117] "RemoveContainer" containerID="be0c79caa5d9dd2c20871e4f314e65be4eca0cd4c85743186922c15f8ac77ad4" Dec 06 15:48:19 crc kubenswrapper[5003]: I1206 15:48:19.666688 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-index-2ht4h" podStartSLOduration=1.928333559 podStartE2EDuration="2.666661688s" podCreationTimestamp="2025-12-06 15:48:17 +0000 UTC" firstStartedPulling="2025-12-06 15:48:18.307028455 +0000 UTC m=+976.840382836" lastFinishedPulling="2025-12-06 15:48:19.045356584 +0000 UTC m=+977.578710965" observedRunningTime="2025-12-06 15:48:19.647053148 +0000 UTC m=+978.180407529" watchObservedRunningTime="2025-12-06 15:48:19.666661688 +0000 UTC m=+978.200016109" Dec 06 15:48:28 crc kubenswrapper[5003]: I1206 15:48:27.999853 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/infra-operator-index-2ht4h" Dec 06 15:48:28 crc kubenswrapper[5003]: I1206 15:48:28.000614 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-index-2ht4h" Dec 06 15:48:28 crc kubenswrapper[5003]: I1206 15:48:28.036085 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/infra-operator-index-2ht4h" Dec 06 15:48:28 crc kubenswrapper[5003]: I1206 15:48:28.756747 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-index-2ht4h" Dec 06 15:48:34 crc kubenswrapper[5003]: I1206 15:48:34.922334 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/7c8268737cab4bb480b8c6360540a07b7a71682e502fa0b9fb67d6a17a98tzf"] Dec 06 15:48:34 crc kubenswrapper[5003]: I1206 15:48:34.925513 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/7c8268737cab4bb480b8c6360540a07b7a71682e502fa0b9fb67d6a17a98tzf" Dec 06 15:48:34 crc kubenswrapper[5003]: I1206 15:48:34.930045 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-kqjk5" Dec 06 15:48:34 crc kubenswrapper[5003]: I1206 15:48:34.935864 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/7c8268737cab4bb480b8c6360540a07b7a71682e502fa0b9fb67d6a17a98tzf"] Dec 06 15:48:35 crc kubenswrapper[5003]: I1206 15:48:35.114120 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1d0165ce-9e29-4ace-8efe-4b89e00318f6-util\") pod \"7c8268737cab4bb480b8c6360540a07b7a71682e502fa0b9fb67d6a17a98tzf\" (UID: \"1d0165ce-9e29-4ace-8efe-4b89e00318f6\") " pod="openstack-operators/7c8268737cab4bb480b8c6360540a07b7a71682e502fa0b9fb67d6a17a98tzf" Dec 06 15:48:35 crc kubenswrapper[5003]: I1206 15:48:35.114444 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dp8xh\" (UniqueName: \"kubernetes.io/projected/1d0165ce-9e29-4ace-8efe-4b89e00318f6-kube-api-access-dp8xh\") pod \"7c8268737cab4bb480b8c6360540a07b7a71682e502fa0b9fb67d6a17a98tzf\" (UID: \"1d0165ce-9e29-4ace-8efe-4b89e00318f6\") " pod="openstack-operators/7c8268737cab4bb480b8c6360540a07b7a71682e502fa0b9fb67d6a17a98tzf" Dec 06 15:48:35 crc kubenswrapper[5003]: I1206 15:48:35.114513 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1d0165ce-9e29-4ace-8efe-4b89e00318f6-bundle\") pod \"7c8268737cab4bb480b8c6360540a07b7a71682e502fa0b9fb67d6a17a98tzf\" (UID: \"1d0165ce-9e29-4ace-8efe-4b89e00318f6\") " pod="openstack-operators/7c8268737cab4bb480b8c6360540a07b7a71682e502fa0b9fb67d6a17a98tzf" Dec 06 15:48:35 crc kubenswrapper[5003]: I1206 15:48:35.216049 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1d0165ce-9e29-4ace-8efe-4b89e00318f6-util\") pod \"7c8268737cab4bb480b8c6360540a07b7a71682e502fa0b9fb67d6a17a98tzf\" (UID: \"1d0165ce-9e29-4ace-8efe-4b89e00318f6\") " pod="openstack-operators/7c8268737cab4bb480b8c6360540a07b7a71682e502fa0b9fb67d6a17a98tzf" Dec 06 15:48:35 crc kubenswrapper[5003]: I1206 15:48:35.216287 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dp8xh\" (UniqueName: \"kubernetes.io/projected/1d0165ce-9e29-4ace-8efe-4b89e00318f6-kube-api-access-dp8xh\") pod \"7c8268737cab4bb480b8c6360540a07b7a71682e502fa0b9fb67d6a17a98tzf\" (UID: \"1d0165ce-9e29-4ace-8efe-4b89e00318f6\") " pod="openstack-operators/7c8268737cab4bb480b8c6360540a07b7a71682e502fa0b9fb67d6a17a98tzf" Dec 06 15:48:35 crc kubenswrapper[5003]: I1206 15:48:35.216431 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1d0165ce-9e29-4ace-8efe-4b89e00318f6-bundle\") pod \"7c8268737cab4bb480b8c6360540a07b7a71682e502fa0b9fb67d6a17a98tzf\" (UID: \"1d0165ce-9e29-4ace-8efe-4b89e00318f6\") " pod="openstack-operators/7c8268737cab4bb480b8c6360540a07b7a71682e502fa0b9fb67d6a17a98tzf" Dec 06 15:48:35 crc kubenswrapper[5003]: I1206 15:48:35.216512 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/1d0165ce-9e29-4ace-8efe-4b89e00318f6-util\") pod \"7c8268737cab4bb480b8c6360540a07b7a71682e502fa0b9fb67d6a17a98tzf\" (UID: \"1d0165ce-9e29-4ace-8efe-4b89e00318f6\") " pod="openstack-operators/7c8268737cab4bb480b8c6360540a07b7a71682e502fa0b9fb67d6a17a98tzf" Dec 06 15:48:35 crc kubenswrapper[5003]: I1206 15:48:35.216749 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1d0165ce-9e29-4ace-8efe-4b89e00318f6-bundle\") pod \"7c8268737cab4bb480b8c6360540a07b7a71682e502fa0b9fb67d6a17a98tzf\" (UID: \"1d0165ce-9e29-4ace-8efe-4b89e00318f6\") " pod="openstack-operators/7c8268737cab4bb480b8c6360540a07b7a71682e502fa0b9fb67d6a17a98tzf" Dec 06 15:48:35 crc kubenswrapper[5003]: I1206 15:48:35.238818 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dp8xh\" (UniqueName: \"kubernetes.io/projected/1d0165ce-9e29-4ace-8efe-4b89e00318f6-kube-api-access-dp8xh\") pod \"7c8268737cab4bb480b8c6360540a07b7a71682e502fa0b9fb67d6a17a98tzf\" (UID: \"1d0165ce-9e29-4ace-8efe-4b89e00318f6\") " pod="openstack-operators/7c8268737cab4bb480b8c6360540a07b7a71682e502fa0b9fb67d6a17a98tzf" Dec 06 15:48:35 crc kubenswrapper[5003]: I1206 15:48:35.244399 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/7c8268737cab4bb480b8c6360540a07b7a71682e502fa0b9fb67d6a17a98tzf" Dec 06 15:48:35 crc kubenswrapper[5003]: I1206 15:48:35.450793 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/7c8268737cab4bb480b8c6360540a07b7a71682e502fa0b9fb67d6a17a98tzf"] Dec 06 15:48:35 crc kubenswrapper[5003]: I1206 15:48:35.760535 5003 generic.go:334] "Generic (PLEG): container finished" podID="1d0165ce-9e29-4ace-8efe-4b89e00318f6" containerID="168cb895292e1f27f56cc10297cefd2eccb5009267495e74fc09d888768d1613" exitCode=0 Dec 06 15:48:35 crc kubenswrapper[5003]: I1206 15:48:35.760590 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7c8268737cab4bb480b8c6360540a07b7a71682e502fa0b9fb67d6a17a98tzf" event={"ID":"1d0165ce-9e29-4ace-8efe-4b89e00318f6","Type":"ContainerDied","Data":"168cb895292e1f27f56cc10297cefd2eccb5009267495e74fc09d888768d1613"} Dec 06 15:48:35 crc kubenswrapper[5003]: I1206 15:48:35.760618 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7c8268737cab4bb480b8c6360540a07b7a71682e502fa0b9fb67d6a17a98tzf" event={"ID":"1d0165ce-9e29-4ace-8efe-4b89e00318f6","Type":"ContainerStarted","Data":"1985f08586e36d869f799c8e2248cb8cfab962ccf0edc9fcae120ecf11e87643"} Dec 06 15:48:36 crc kubenswrapper[5003]: I1206 15:48:36.769667 5003 generic.go:334] "Generic (PLEG): container finished" podID="1d0165ce-9e29-4ace-8efe-4b89e00318f6" containerID="8267da1378a67e96a4b47444d38b31927526226b6024806834287a7fc1acb5b1" exitCode=0 Dec 06 15:48:36 crc kubenswrapper[5003]: I1206 15:48:36.769809 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7c8268737cab4bb480b8c6360540a07b7a71682e502fa0b9fb67d6a17a98tzf" event={"ID":"1d0165ce-9e29-4ace-8efe-4b89e00318f6","Type":"ContainerDied","Data":"8267da1378a67e96a4b47444d38b31927526226b6024806834287a7fc1acb5b1"} Dec 06 15:48:37 crc kubenswrapper[5003]: I1206 15:48:37.776679 5003 generic.go:334] "Generic (PLEG): container finished" podID="1d0165ce-9e29-4ace-8efe-4b89e00318f6" containerID="ebd784c96d4ff0bdd3b40fde0d567beae42f4ad02e2fe4f00d51eb631e82d81f" exitCode=0 Dec 06 15:48:37 crc kubenswrapper[5003]: I1206 15:48:37.776739 5003 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7c8268737cab4bb480b8c6360540a07b7a71682e502fa0b9fb67d6a17a98tzf" event={"ID":"1d0165ce-9e29-4ace-8efe-4b89e00318f6","Type":"ContainerDied","Data":"ebd784c96d4ff0bdd3b40fde0d567beae42f4ad02e2fe4f00d51eb631e82d81f"} Dec 06 15:48:39 crc kubenswrapper[5003]: I1206 15:48:39.018574 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/7c8268737cab4bb480b8c6360540a07b7a71682e502fa0b9fb67d6a17a98tzf" Dec 06 15:48:39 crc kubenswrapper[5003]: I1206 15:48:39.167841 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1d0165ce-9e29-4ace-8efe-4b89e00318f6-util\") pod \"1d0165ce-9e29-4ace-8efe-4b89e00318f6\" (UID: \"1d0165ce-9e29-4ace-8efe-4b89e00318f6\") " Dec 06 15:48:39 crc kubenswrapper[5003]: I1206 15:48:39.168010 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dp8xh\" (UniqueName: \"kubernetes.io/projected/1d0165ce-9e29-4ace-8efe-4b89e00318f6-kube-api-access-dp8xh\") pod \"1d0165ce-9e29-4ace-8efe-4b89e00318f6\" (UID: \"1d0165ce-9e29-4ace-8efe-4b89e00318f6\") " Dec 06 15:48:39 crc kubenswrapper[5003]: I1206 15:48:39.168660 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1d0165ce-9e29-4ace-8efe-4b89e00318f6-bundle\") pod \"1d0165ce-9e29-4ace-8efe-4b89e00318f6\" (UID: \"1d0165ce-9e29-4ace-8efe-4b89e00318f6\") " Dec 06 15:48:39 crc kubenswrapper[5003]: I1206 15:48:39.171281 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d0165ce-9e29-4ace-8efe-4b89e00318f6-bundle" (OuterVolumeSpecName: "bundle") pod "1d0165ce-9e29-4ace-8efe-4b89e00318f6" (UID: "1d0165ce-9e29-4ace-8efe-4b89e00318f6"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:48:39 crc kubenswrapper[5003]: I1206 15:48:39.173904 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d0165ce-9e29-4ace-8efe-4b89e00318f6-kube-api-access-dp8xh" (OuterVolumeSpecName: "kube-api-access-dp8xh") pod "1d0165ce-9e29-4ace-8efe-4b89e00318f6" (UID: "1d0165ce-9e29-4ace-8efe-4b89e00318f6"). InnerVolumeSpecName "kube-api-access-dp8xh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:48:39 crc kubenswrapper[5003]: I1206 15:48:39.182321 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d0165ce-9e29-4ace-8efe-4b89e00318f6-util" (OuterVolumeSpecName: "util") pod "1d0165ce-9e29-4ace-8efe-4b89e00318f6" (UID: "1d0165ce-9e29-4ace-8efe-4b89e00318f6"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:48:39 crc kubenswrapper[5003]: I1206 15:48:39.270612 5003 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1d0165ce-9e29-4ace-8efe-4b89e00318f6-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 15:48:39 crc kubenswrapper[5003]: I1206 15:48:39.270642 5003 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1d0165ce-9e29-4ace-8efe-4b89e00318f6-util\") on node \"crc\" DevicePath \"\"" Dec 06 15:48:39 crc kubenswrapper[5003]: I1206 15:48:39.270651 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dp8xh\" (UniqueName: \"kubernetes.io/projected/1d0165ce-9e29-4ace-8efe-4b89e00318f6-kube-api-access-dp8xh\") on node \"crc\" DevicePath \"\"" Dec 06 15:48:39 crc kubenswrapper[5003]: I1206 15:48:39.795010 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7c8268737cab4bb480b8c6360540a07b7a71682e502fa0b9fb67d6a17a98tzf" event={"ID":"1d0165ce-9e29-4ace-8efe-4b89e00318f6","Type":"ContainerDied","Data":"1985f08586e36d869f799c8e2248cb8cfab962ccf0edc9fcae120ecf11e87643"} Dec 06 15:48:39 crc kubenswrapper[5003]: I1206 15:48:39.795086 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1985f08586e36d869f799c8e2248cb8cfab962ccf0edc9fcae120ecf11e87643" Dec 06 15:48:39 crc kubenswrapper[5003]: I1206 15:48:39.795128 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/7c8268737cab4bb480b8c6360540a07b7a71682e502fa0b9fb67d6a17a98tzf" Dec 06 15:48:53 crc kubenswrapper[5003]: I1206 15:48:53.664374 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-5b58584fcf-h8rqq"] Dec 06 15:48:53 crc kubenswrapper[5003]: E1206 15:48:53.665199 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d0165ce-9e29-4ace-8efe-4b89e00318f6" containerName="util" Dec 06 15:48:53 crc kubenswrapper[5003]: I1206 15:48:53.665213 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d0165ce-9e29-4ace-8efe-4b89e00318f6" containerName="util" Dec 06 15:48:53 crc kubenswrapper[5003]: E1206 15:48:53.665228 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d0165ce-9e29-4ace-8efe-4b89e00318f6" containerName="extract" Dec 06 15:48:53 crc kubenswrapper[5003]: I1206 15:48:53.665235 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d0165ce-9e29-4ace-8efe-4b89e00318f6" containerName="extract" Dec 06 15:48:53 crc kubenswrapper[5003]: E1206 15:48:53.665246 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d0165ce-9e29-4ace-8efe-4b89e00318f6" containerName="pull" Dec 06 15:48:53 crc kubenswrapper[5003]: I1206 15:48:53.665253 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d0165ce-9e29-4ace-8efe-4b89e00318f6" containerName="pull" Dec 06 15:48:53 crc kubenswrapper[5003]: I1206 15:48:53.665386 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d0165ce-9e29-4ace-8efe-4b89e00318f6" containerName="extract" Dec 06 15:48:53 crc kubenswrapper[5003]: I1206 15:48:53.665993 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-5b58584fcf-h8rqq" Dec 06 15:48:53 crc kubenswrapper[5003]: I1206 15:48:53.667886 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-service-cert" Dec 06 15:48:53 crc kubenswrapper[5003]: I1206 15:48:53.668071 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-2wbbn" Dec 06 15:48:53 crc kubenswrapper[5003]: I1206 15:48:53.669267 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0f4eeada-366f-4403-bd3e-54235105ef11-apiservice-cert\") pod \"infra-operator-controller-manager-5b58584fcf-h8rqq\" (UID: \"0f4eeada-366f-4403-bd3e-54235105ef11\") " pod="openstack-operators/infra-operator-controller-manager-5b58584fcf-h8rqq" Dec 06 15:48:53 crc kubenswrapper[5003]: I1206 15:48:53.669342 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9d4xl\" (UniqueName: \"kubernetes.io/projected/0f4eeada-366f-4403-bd3e-54235105ef11-kube-api-access-9d4xl\") pod \"infra-operator-controller-manager-5b58584fcf-h8rqq\" (UID: \"0f4eeada-366f-4403-bd3e-54235105ef11\") " pod="openstack-operators/infra-operator-controller-manager-5b58584fcf-h8rqq" Dec 06 15:48:53 crc kubenswrapper[5003]: I1206 15:48:53.669370 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0f4eeada-366f-4403-bd3e-54235105ef11-webhook-cert\") pod \"infra-operator-controller-manager-5b58584fcf-h8rqq\" (UID: \"0f4eeada-366f-4403-bd3e-54235105ef11\") " pod="openstack-operators/infra-operator-controller-manager-5b58584fcf-h8rqq" Dec 06 15:48:53 crc kubenswrapper[5003]: I1206 15:48:53.679088 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-5b58584fcf-h8rqq"] Dec 06 15:48:53 crc kubenswrapper[5003]: I1206 15:48:53.770171 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9d4xl\" (UniqueName: \"kubernetes.io/projected/0f4eeada-366f-4403-bd3e-54235105ef11-kube-api-access-9d4xl\") pod \"infra-operator-controller-manager-5b58584fcf-h8rqq\" (UID: \"0f4eeada-366f-4403-bd3e-54235105ef11\") " pod="openstack-operators/infra-operator-controller-manager-5b58584fcf-h8rqq" Dec 06 15:48:53 crc kubenswrapper[5003]: I1206 15:48:53.770222 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0f4eeada-366f-4403-bd3e-54235105ef11-webhook-cert\") pod \"infra-operator-controller-manager-5b58584fcf-h8rqq\" (UID: \"0f4eeada-366f-4403-bd3e-54235105ef11\") " pod="openstack-operators/infra-operator-controller-manager-5b58584fcf-h8rqq" Dec 06 15:48:53 crc kubenswrapper[5003]: I1206 15:48:53.770289 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0f4eeada-366f-4403-bd3e-54235105ef11-apiservice-cert\") pod \"infra-operator-controller-manager-5b58584fcf-h8rqq\" (UID: \"0f4eeada-366f-4403-bd3e-54235105ef11\") " pod="openstack-operators/infra-operator-controller-manager-5b58584fcf-h8rqq" Dec 06 15:48:53 crc kubenswrapper[5003]: I1206 15:48:53.776538 5003 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0f4eeada-366f-4403-bd3e-54235105ef11-apiservice-cert\") pod \"infra-operator-controller-manager-5b58584fcf-h8rqq\" (UID: \"0f4eeada-366f-4403-bd3e-54235105ef11\") " pod="openstack-operators/infra-operator-controller-manager-5b58584fcf-h8rqq" Dec 06 15:48:53 crc kubenswrapper[5003]: I1206 15:48:53.776631 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0f4eeada-366f-4403-bd3e-54235105ef11-webhook-cert\") pod \"infra-operator-controller-manager-5b58584fcf-h8rqq\" (UID: \"0f4eeada-366f-4403-bd3e-54235105ef11\") " pod="openstack-operators/infra-operator-controller-manager-5b58584fcf-h8rqq" Dec 06 15:48:53 crc kubenswrapper[5003]: I1206 15:48:53.785348 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9d4xl\" (UniqueName: \"kubernetes.io/projected/0f4eeada-366f-4403-bd3e-54235105ef11-kube-api-access-9d4xl\") pod \"infra-operator-controller-manager-5b58584fcf-h8rqq\" (UID: \"0f4eeada-366f-4403-bd3e-54235105ef11\") " pod="openstack-operators/infra-operator-controller-manager-5b58584fcf-h8rqq" Dec 06 15:48:53 crc kubenswrapper[5003]: I1206 15:48:53.988782 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-5b58584fcf-h8rqq" Dec 06 15:48:54 crc kubenswrapper[5003]: I1206 15:48:54.423446 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-5b58584fcf-h8rqq"] Dec 06 15:48:54 crc kubenswrapper[5003]: W1206 15:48:54.433805 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0f4eeada_366f_4403_bd3e_54235105ef11.slice/crio-e811cbf39e07661ffafd77652084a7946bc5f11405ba820b65723942331e3cd5 WatchSource:0}: Error finding container e811cbf39e07661ffafd77652084a7946bc5f11405ba820b65723942331e3cd5: Status 404 returned error can't find the container with id e811cbf39e07661ffafd77652084a7946bc5f11405ba820b65723942331e3cd5 Dec 06 15:48:54 crc kubenswrapper[5003]: I1206 15:48:54.889772 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-5b58584fcf-h8rqq" event={"ID":"0f4eeada-366f-4403-bd3e-54235105ef11","Type":"ContainerStarted","Data":"e811cbf39e07661ffafd77652084a7946bc5f11405ba820b65723942331e3cd5"} Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.176705 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/openstack-galera-0"] Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.180060 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/openstack-galera-0" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.185415 5003 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"galera-openstack-dockercfg-5h6p7" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.185737 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"horizon-kuttl-tests"/"openstack-scripts" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.186070 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"horizon-kuttl-tests"/"kube-root-ca.crt" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.186260 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"horizon-kuttl-tests"/"openshift-service-ca.crt" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.186449 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"horizon-kuttl-tests"/"openstack-config-data" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.191196 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/openstack-galera-1"] Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.193346 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/openstack-galera-1" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.205987 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/openstack-galera-0"] Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.212946 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/openstack-galera-2"] Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.214999 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/openstack-galera-2" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.225397 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/openstack-galera-1"] Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.231537 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/openstack-galera-2"] Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.304427 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6b45e98d-ae85-45e0-b565-11db9addcad3-config-data-default\") pod \"openstack-galera-0\" (UID: \"6b45e98d-ae85-45e0-b565-11db9addcad3\") " pod="horizon-kuttl-tests/openstack-galera-0" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.304486 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6b45e98d-ae85-45e0-b565-11db9addcad3-operator-scripts\") pod \"openstack-galera-0\" (UID: \"6b45e98d-ae85-45e0-b565-11db9addcad3\") " pod="horizon-kuttl-tests/openstack-galera-0" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.304528 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/72422d98-0d90-4087-8aae-e78370f932b1-kolla-config\") pod \"openstack-galera-1\" (UID: \"72422d98-0d90-4087-8aae-e78370f932b1\") " pod="horizon-kuttl-tests/openstack-galera-1" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.304590 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2f6l6\" (UniqueName: 
\"kubernetes.io/projected/72422d98-0d90-4087-8aae-e78370f932b1-kube-api-access-2f6l6\") pod \"openstack-galera-1\" (UID: \"72422d98-0d90-4087-8aae-e78370f932b1\") " pod="horizon-kuttl-tests/openstack-galera-1" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.304623 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6b45e98d-ae85-45e0-b565-11db9addcad3-config-data-generated\") pod \"openstack-galera-0\" (UID: \"6b45e98d-ae85-45e0-b565-11db9addcad3\") " pod="horizon-kuttl-tests/openstack-galera-0" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.304641 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/72422d98-0d90-4087-8aae-e78370f932b1-config-data-generated\") pod \"openstack-galera-1\" (UID: \"72422d98-0d90-4087-8aae-e78370f932b1\") " pod="horizon-kuttl-tests/openstack-galera-1" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.304658 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/72422d98-0d90-4087-8aae-e78370f932b1-config-data-default\") pod \"openstack-galera-1\" (UID: \"72422d98-0d90-4087-8aae-e78370f932b1\") " pod="horizon-kuttl-tests/openstack-galera-1" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.304701 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"6b45e98d-ae85-45e0-b565-11db9addcad3\") " pod="horizon-kuttl-tests/openstack-galera-0" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.304779 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72422d98-0d90-4087-8aae-e78370f932b1-operator-scripts\") pod \"openstack-galera-1\" (UID: \"72422d98-0d90-4087-8aae-e78370f932b1\") " pod="horizon-kuttl-tests/openstack-galera-1" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.304826 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-galera-1\" (UID: \"72422d98-0d90-4087-8aae-e78370f932b1\") " pod="horizon-kuttl-tests/openstack-galera-1" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.304882 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6b45e98d-ae85-45e0-b565-11db9addcad3-kolla-config\") pod \"openstack-galera-0\" (UID: \"6b45e98d-ae85-45e0-b565-11db9addcad3\") " pod="horizon-kuttl-tests/openstack-galera-0" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.304912 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bcl4k\" (UniqueName: \"kubernetes.io/projected/6b45e98d-ae85-45e0-b565-11db9addcad3-kube-api-access-bcl4k\") pod \"openstack-galera-0\" (UID: \"6b45e98d-ae85-45e0-b565-11db9addcad3\") " pod="horizon-kuttl-tests/openstack-galera-0" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.406024 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-galera-1\" (UID: \"72422d98-0d90-4087-8aae-e78370f932b1\") " pod="horizon-kuttl-tests/openstack-galera-1" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.406099 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6433e1ae-768c-42ba-b961-4bd7bfba8701-config-data-generated\") pod \"openstack-galera-2\" (UID: \"6433e1ae-768c-42ba-b961-4bd7bfba8701\") " pod="horizon-kuttl-tests/openstack-galera-2" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.406127 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qknf2\" (UniqueName: \"kubernetes.io/projected/6433e1ae-768c-42ba-b961-4bd7bfba8701-kube-api-access-qknf2\") pod \"openstack-galera-2\" (UID: \"6433e1ae-768c-42ba-b961-4bd7bfba8701\") " pod="horizon-kuttl-tests/openstack-galera-2" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.406177 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6b45e98d-ae85-45e0-b565-11db9addcad3-kolla-config\") pod \"openstack-galera-0\" (UID: \"6b45e98d-ae85-45e0-b565-11db9addcad3\") " pod="horizon-kuttl-tests/openstack-galera-0" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.406200 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bcl4k\" (UniqueName: \"kubernetes.io/projected/6b45e98d-ae85-45e0-b565-11db9addcad3-kube-api-access-bcl4k\") pod \"openstack-galera-0\" (UID: \"6b45e98d-ae85-45e0-b565-11db9addcad3\") " pod="horizon-kuttl-tests/openstack-galera-0" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.406230 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6433e1ae-768c-42ba-b961-4bd7bfba8701-config-data-default\") pod \"openstack-galera-2\" (UID: \"6433e1ae-768c-42ba-b961-4bd7bfba8701\") " pod="horizon-kuttl-tests/openstack-galera-2" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.406252 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6b45e98d-ae85-45e0-b565-11db9addcad3-config-data-default\") pod \"openstack-galera-0\" (UID: \"6b45e98d-ae85-45e0-b565-11db9addcad3\") " pod="horizon-kuttl-tests/openstack-galera-0" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.406278 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6b45e98d-ae85-45e0-b565-11db9addcad3-operator-scripts\") pod \"openstack-galera-0\" (UID: \"6b45e98d-ae85-45e0-b565-11db9addcad3\") " pod="horizon-kuttl-tests/openstack-galera-0" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.406307 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/72422d98-0d90-4087-8aae-e78370f932b1-kolla-config\") pod \"openstack-galera-1\" (UID: \"72422d98-0d90-4087-8aae-e78370f932b1\") " pod="horizon-kuttl-tests/openstack-galera-1" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.406433 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" 
(UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-galera-2\" (UID: \"6433e1ae-768c-42ba-b961-4bd7bfba8701\") " pod="horizon-kuttl-tests/openstack-galera-2" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.406533 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2f6l6\" (UniqueName: \"kubernetes.io/projected/72422d98-0d90-4087-8aae-e78370f932b1-kube-api-access-2f6l6\") pod \"openstack-galera-1\" (UID: \"72422d98-0d90-4087-8aae-e78370f932b1\") " pod="horizon-kuttl-tests/openstack-galera-1" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.406542 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-galera-1\" (UID: \"72422d98-0d90-4087-8aae-e78370f932b1\") device mount path \"/mnt/openstack/pv07\"" pod="horizon-kuttl-tests/openstack-galera-1" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.406941 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6b45e98d-ae85-45e0-b565-11db9addcad3-config-data-generated\") pod \"openstack-galera-0\" (UID: \"6b45e98d-ae85-45e0-b565-11db9addcad3\") " pod="horizon-kuttl-tests/openstack-galera-0" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.406985 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/72422d98-0d90-4087-8aae-e78370f932b1-config-data-generated\") pod \"openstack-galera-1\" (UID: \"72422d98-0d90-4087-8aae-e78370f932b1\") " pod="horizon-kuttl-tests/openstack-galera-1" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.407017 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/72422d98-0d90-4087-8aae-e78370f932b1-config-data-default\") pod \"openstack-galera-1\" (UID: \"72422d98-0d90-4087-8aae-e78370f932b1\") " pod="horizon-kuttl-tests/openstack-galera-1" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.407046 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"6b45e98d-ae85-45e0-b565-11db9addcad3\") " pod="horizon-kuttl-tests/openstack-galera-0" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.407081 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6433e1ae-768c-42ba-b961-4bd7bfba8701-operator-scripts\") pod \"openstack-galera-2\" (UID: \"6433e1ae-768c-42ba-b961-4bd7bfba8701\") " pod="horizon-kuttl-tests/openstack-galera-2" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.407114 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72422d98-0d90-4087-8aae-e78370f932b1-operator-scripts\") pod \"openstack-galera-1\" (UID: \"72422d98-0d90-4087-8aae-e78370f932b1\") " pod="horizon-kuttl-tests/openstack-galera-1" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.407120 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6b45e98d-ae85-45e0-b565-11db9addcad3-config-data-default\") pod 
\"openstack-galera-0\" (UID: \"6b45e98d-ae85-45e0-b565-11db9addcad3\") " pod="horizon-kuttl-tests/openstack-galera-0" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.407139 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6433e1ae-768c-42ba-b961-4bd7bfba8701-kolla-config\") pod \"openstack-galera-2\" (UID: \"6433e1ae-768c-42ba-b961-4bd7bfba8701\") " pod="horizon-kuttl-tests/openstack-galera-2" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.406983 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6b45e98d-ae85-45e0-b565-11db9addcad3-kolla-config\") pod \"openstack-galera-0\" (UID: \"6b45e98d-ae85-45e0-b565-11db9addcad3\") " pod="horizon-kuttl-tests/openstack-galera-0" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.407353 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/72422d98-0d90-4087-8aae-e78370f932b1-config-data-generated\") pod \"openstack-galera-1\" (UID: \"72422d98-0d90-4087-8aae-e78370f932b1\") " pod="horizon-kuttl-tests/openstack-galera-1" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.407479 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6b45e98d-ae85-45e0-b565-11db9addcad3-config-data-generated\") pod \"openstack-galera-0\" (UID: \"6b45e98d-ae85-45e0-b565-11db9addcad3\") " pod="horizon-kuttl-tests/openstack-galera-0" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.407562 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"6b45e98d-ae85-45e0-b565-11db9addcad3\") device mount path \"/mnt/openstack/pv11\"" pod="horizon-kuttl-tests/openstack-galera-0" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.407930 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/72422d98-0d90-4087-8aae-e78370f932b1-config-data-default\") pod \"openstack-galera-1\" (UID: \"72422d98-0d90-4087-8aae-e78370f932b1\") " pod="horizon-kuttl-tests/openstack-galera-1" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.408326 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/72422d98-0d90-4087-8aae-e78370f932b1-kolla-config\") pod \"openstack-galera-1\" (UID: \"72422d98-0d90-4087-8aae-e78370f932b1\") " pod="horizon-kuttl-tests/openstack-galera-1" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.408365 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6b45e98d-ae85-45e0-b565-11db9addcad3-operator-scripts\") pod \"openstack-galera-0\" (UID: \"6b45e98d-ae85-45e0-b565-11db9addcad3\") " pod="horizon-kuttl-tests/openstack-galera-0" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.408717 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72422d98-0d90-4087-8aae-e78370f932b1-operator-scripts\") pod \"openstack-galera-1\" (UID: \"72422d98-0d90-4087-8aae-e78370f932b1\") " pod="horizon-kuttl-tests/openstack-galera-1" Dec 06 
15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.428800 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-galera-1\" (UID: \"72422d98-0d90-4087-8aae-e78370f932b1\") " pod="horizon-kuttl-tests/openstack-galera-1" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.434046 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2f6l6\" (UniqueName: \"kubernetes.io/projected/72422d98-0d90-4087-8aae-e78370f932b1-kube-api-access-2f6l6\") pod \"openstack-galera-1\" (UID: \"72422d98-0d90-4087-8aae-e78370f932b1\") " pod="horizon-kuttl-tests/openstack-galera-1" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.439032 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bcl4k\" (UniqueName: \"kubernetes.io/projected/6b45e98d-ae85-45e0-b565-11db9addcad3-kube-api-access-bcl4k\") pod \"openstack-galera-0\" (UID: \"6b45e98d-ae85-45e0-b565-11db9addcad3\") " pod="horizon-kuttl-tests/openstack-galera-0" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.445720 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"6b45e98d-ae85-45e0-b565-11db9addcad3\") " pod="horizon-kuttl-tests/openstack-galera-0" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.506983 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/openstack-galera-0" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.508304 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6433e1ae-768c-42ba-b961-4bd7bfba8701-kolla-config\") pod \"openstack-galera-2\" (UID: \"6433e1ae-768c-42ba-b961-4bd7bfba8701\") " pod="horizon-kuttl-tests/openstack-galera-2" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.508377 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6433e1ae-768c-42ba-b961-4bd7bfba8701-config-data-generated\") pod \"openstack-galera-2\" (UID: \"6433e1ae-768c-42ba-b961-4bd7bfba8701\") " pod="horizon-kuttl-tests/openstack-galera-2" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.508410 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qknf2\" (UniqueName: \"kubernetes.io/projected/6433e1ae-768c-42ba-b961-4bd7bfba8701-kube-api-access-qknf2\") pod \"openstack-galera-2\" (UID: \"6433e1ae-768c-42ba-b961-4bd7bfba8701\") " pod="horizon-kuttl-tests/openstack-galera-2" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.508449 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6433e1ae-768c-42ba-b961-4bd7bfba8701-config-data-default\") pod \"openstack-galera-2\" (UID: \"6433e1ae-768c-42ba-b961-4bd7bfba8701\") " pod="horizon-kuttl-tests/openstack-galera-2" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.508527 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-galera-2\" (UID: \"6433e1ae-768c-42ba-b961-4bd7bfba8701\") " pod="horizon-kuttl-tests/openstack-galera-2" Dec 06 
15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.508576 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6433e1ae-768c-42ba-b961-4bd7bfba8701-operator-scripts\") pod \"openstack-galera-2\" (UID: \"6433e1ae-768c-42ba-b961-4bd7bfba8701\") " pod="horizon-kuttl-tests/openstack-galera-2" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.508959 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6433e1ae-768c-42ba-b961-4bd7bfba8701-config-data-generated\") pod \"openstack-galera-2\" (UID: \"6433e1ae-768c-42ba-b961-4bd7bfba8701\") " pod="horizon-kuttl-tests/openstack-galera-2" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.509273 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6433e1ae-768c-42ba-b961-4bd7bfba8701-kolla-config\") pod \"openstack-galera-2\" (UID: \"6433e1ae-768c-42ba-b961-4bd7bfba8701\") " pod="horizon-kuttl-tests/openstack-galera-2" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.509524 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-galera-2\" (UID: \"6433e1ae-768c-42ba-b961-4bd7bfba8701\") device mount path \"/mnt/openstack/pv09\"" pod="horizon-kuttl-tests/openstack-galera-2" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.509801 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6433e1ae-768c-42ba-b961-4bd7bfba8701-config-data-default\") pod \"openstack-galera-2\" (UID: \"6433e1ae-768c-42ba-b961-4bd7bfba8701\") " pod="horizon-kuttl-tests/openstack-galera-2" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.510695 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6433e1ae-768c-42ba-b961-4bd7bfba8701-operator-scripts\") pod \"openstack-galera-2\" (UID: \"6433e1ae-768c-42ba-b961-4bd7bfba8701\") " pod="horizon-kuttl-tests/openstack-galera-2" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.515776 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/openstack-galera-1" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.527217 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qknf2\" (UniqueName: \"kubernetes.io/projected/6433e1ae-768c-42ba-b961-4bd7bfba8701-kube-api-access-qknf2\") pod \"openstack-galera-2\" (UID: \"6433e1ae-768c-42ba-b961-4bd7bfba8701\") " pod="horizon-kuttl-tests/openstack-galera-2" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.527688 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-galera-2\" (UID: \"6433e1ae-768c-42ba-b961-4bd7bfba8701\") " pod="horizon-kuttl-tests/openstack-galera-2" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.789727 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/openstack-galera-1"] Dec 06 15:48:56 crc kubenswrapper[5003]: W1206 15:48:56.792935 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod72422d98_0d90_4087_8aae_e78370f932b1.slice/crio-8a09fa2ffad21ad150ebbe9f0910a228b7ffd3807a631b55e1adf579c5f15e5a WatchSource:0}: Error finding container 8a09fa2ffad21ad150ebbe9f0910a228b7ffd3807a631b55e1adf579c5f15e5a: Status 404 returned error can't find the container with id 8a09fa2ffad21ad150ebbe9f0910a228b7ffd3807a631b55e1adf579c5f15e5a Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.830848 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/openstack-galera-2" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.912715 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-5b58584fcf-h8rqq" event={"ID":"0f4eeada-366f-4403-bd3e-54235105ef11","Type":"ContainerStarted","Data":"cdbc11c195fd2646a7c7283174463f1daa94d7c57c9b053c49590c8ed7770c9f"} Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.912817 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-5b58584fcf-h8rqq" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.914884 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-1" event={"ID":"72422d98-0d90-4087-8aae-e78370f932b1","Type":"ContainerStarted","Data":"8a09fa2ffad21ad150ebbe9f0910a228b7ffd3807a631b55e1adf579c5f15e5a"} Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.936090 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-5b58584fcf-h8rqq" podStartSLOduration=1.95759846 podStartE2EDuration="3.936067458s" podCreationTimestamp="2025-12-06 15:48:53 +0000 UTC" firstStartedPulling="2025-12-06 15:48:54.436312553 +0000 UTC m=+1012.969666934" lastFinishedPulling="2025-12-06 15:48:56.414781551 +0000 UTC m=+1014.948135932" observedRunningTime="2025-12-06 15:48:56.932971063 +0000 UTC m=+1015.466325444" watchObservedRunningTime="2025-12-06 15:48:56.936067458 +0000 UTC m=+1015.469421859" Dec 06 15:48:56 crc kubenswrapper[5003]: I1206 15:48:56.967372 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/openstack-galera-0"] Dec 06 15:48:56 crc kubenswrapper[5003]: W1206 15:48:56.972129 5003 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6b45e98d_ae85_45e0_b565_11db9addcad3.slice/crio-0dcc429a9bf15f3ced249fd46aee662a76d8c5492810dc6c36dbd9a491f163c6 WatchSource:0}: Error finding container 0dcc429a9bf15f3ced249fd46aee662a76d8c5492810dc6c36dbd9a491f163c6: Status 404 returned error can't find the container with id 0dcc429a9bf15f3ced249fd46aee662a76d8c5492810dc6c36dbd9a491f163c6 Dec 06 15:48:57 crc kubenswrapper[5003]: I1206 15:48:57.221866 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/openstack-galera-2"] Dec 06 15:48:57 crc kubenswrapper[5003]: W1206 15:48:57.232435 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6433e1ae_768c_42ba_b961_4bd7bfba8701.slice/crio-4792a8bf79d19819d2211dc61c35f34531de96a9d81fae032f20d0cc22f219cd WatchSource:0}: Error finding container 4792a8bf79d19819d2211dc61c35f34531de96a9d81fae032f20d0cc22f219cd: Status 404 returned error can't find the container with id 4792a8bf79d19819d2211dc61c35f34531de96a9d81fae032f20d0cc22f219cd Dec 06 15:48:57 crc kubenswrapper[5003]: I1206 15:48:57.951134 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-0" event={"ID":"6b45e98d-ae85-45e0-b565-11db9addcad3","Type":"ContainerStarted","Data":"0dcc429a9bf15f3ced249fd46aee662a76d8c5492810dc6c36dbd9a491f163c6"} Dec 06 15:48:57 crc kubenswrapper[5003]: I1206 15:48:57.952236 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-2" event={"ID":"6433e1ae-768c-42ba-b961-4bd7bfba8701","Type":"ContainerStarted","Data":"4792a8bf79d19819d2211dc61c35f34531de96a9d81fae032f20d0cc22f219cd"} Dec 06 15:49:03 crc kubenswrapper[5003]: I1206 15:49:03.994374 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-5b58584fcf-h8rqq" Dec 06 15:49:05 crc kubenswrapper[5003]: I1206 15:49:05.000482 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-1" event={"ID":"72422d98-0d90-4087-8aae-e78370f932b1","Type":"ContainerStarted","Data":"41205e827227e20713cac2571c5fdf50dcc7dd7db3330f432ed8a4526fe4ba58"} Dec 06 15:49:05 crc kubenswrapper[5003]: I1206 15:49:05.003660 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-0" event={"ID":"6b45e98d-ae85-45e0-b565-11db9addcad3","Type":"ContainerStarted","Data":"c5094b2f8afe0c8541ddfda3379dcc608d2a6e63c745cb7108302879122eecdf"} Dec 06 15:49:05 crc kubenswrapper[5003]: I1206 15:49:05.007025 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-2" event={"ID":"6433e1ae-768c-42ba-b961-4bd7bfba8701","Type":"ContainerStarted","Data":"48f91b4bca2bb0bb84182a7178f812d724c21803359edfb6b556c9946bec902e"} Dec 06 15:49:06 crc kubenswrapper[5003]: I1206 15:49:06.437789 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/memcached-0"] Dec 06 15:49:06 crc kubenswrapper[5003]: I1206 15:49:06.438887 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/memcached-0" Dec 06 15:49:06 crc kubenswrapper[5003]: I1206 15:49:06.441321 5003 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"memcached-memcached-dockercfg-lr5sg" Dec 06 15:49:06 crc kubenswrapper[5003]: I1206 15:49:06.442543 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"horizon-kuttl-tests"/"memcached-config-data" Dec 06 15:49:06 crc kubenswrapper[5003]: I1206 15:49:06.454905 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/memcached-0"] Dec 06 15:49:06 crc kubenswrapper[5003]: I1206 15:49:06.560460 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d25w5\" (UniqueName: \"kubernetes.io/projected/69802b43-d4de-4ef5-9e10-9405562de3e7-kube-api-access-d25w5\") pod \"memcached-0\" (UID: \"69802b43-d4de-4ef5-9e10-9405562de3e7\") " pod="horizon-kuttl-tests/memcached-0" Dec 06 15:49:06 crc kubenswrapper[5003]: I1206 15:49:06.560539 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/69802b43-d4de-4ef5-9e10-9405562de3e7-config-data\") pod \"memcached-0\" (UID: \"69802b43-d4de-4ef5-9e10-9405562de3e7\") " pod="horizon-kuttl-tests/memcached-0" Dec 06 15:49:06 crc kubenswrapper[5003]: I1206 15:49:06.560606 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/69802b43-d4de-4ef5-9e10-9405562de3e7-kolla-config\") pod \"memcached-0\" (UID: \"69802b43-d4de-4ef5-9e10-9405562de3e7\") " pod="horizon-kuttl-tests/memcached-0" Dec 06 15:49:06 crc kubenswrapper[5003]: I1206 15:49:06.662177 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d25w5\" (UniqueName: \"kubernetes.io/projected/69802b43-d4de-4ef5-9e10-9405562de3e7-kube-api-access-d25w5\") pod \"memcached-0\" (UID: \"69802b43-d4de-4ef5-9e10-9405562de3e7\") " pod="horizon-kuttl-tests/memcached-0" Dec 06 15:49:06 crc kubenswrapper[5003]: I1206 15:49:06.662244 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/69802b43-d4de-4ef5-9e10-9405562de3e7-config-data\") pod \"memcached-0\" (UID: \"69802b43-d4de-4ef5-9e10-9405562de3e7\") " pod="horizon-kuttl-tests/memcached-0" Dec 06 15:49:06 crc kubenswrapper[5003]: I1206 15:49:06.662317 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/69802b43-d4de-4ef5-9e10-9405562de3e7-kolla-config\") pod \"memcached-0\" (UID: \"69802b43-d4de-4ef5-9e10-9405562de3e7\") " pod="horizon-kuttl-tests/memcached-0" Dec 06 15:49:06 crc kubenswrapper[5003]: I1206 15:49:06.663279 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/69802b43-d4de-4ef5-9e10-9405562de3e7-config-data\") pod \"memcached-0\" (UID: \"69802b43-d4de-4ef5-9e10-9405562de3e7\") " pod="horizon-kuttl-tests/memcached-0" Dec 06 15:49:06 crc kubenswrapper[5003]: I1206 15:49:06.663405 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/69802b43-d4de-4ef5-9e10-9405562de3e7-kolla-config\") pod \"memcached-0\" (UID: \"69802b43-d4de-4ef5-9e10-9405562de3e7\") " pod="horizon-kuttl-tests/memcached-0" Dec 06 
15:49:06 crc kubenswrapper[5003]: I1206 15:49:06.680332 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d25w5\" (UniqueName: \"kubernetes.io/projected/69802b43-d4de-4ef5-9e10-9405562de3e7-kube-api-access-d25w5\") pod \"memcached-0\" (UID: \"69802b43-d4de-4ef5-9e10-9405562de3e7\") " pod="horizon-kuttl-tests/memcached-0" Dec 06 15:49:06 crc kubenswrapper[5003]: I1206 15:49:06.757002 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/memcached-0" Dec 06 15:49:07 crc kubenswrapper[5003]: I1206 15:49:07.188386 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/memcached-0"] Dec 06 15:49:07 crc kubenswrapper[5003]: W1206 15:49:07.200544 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod69802b43_d4de_4ef5_9e10_9405562de3e7.slice/crio-5cb3ac3c117c6db88e2e379530af05ae8b64cf5e9a10e69445b954e4a6269a1b WatchSource:0}: Error finding container 5cb3ac3c117c6db88e2e379530af05ae8b64cf5e9a10e69445b954e4a6269a1b: Status 404 returned error can't find the container with id 5cb3ac3c117c6db88e2e379530af05ae8b64cf5e9a10e69445b954e4a6269a1b Dec 06 15:49:08 crc kubenswrapper[5003]: I1206 15:49:08.024056 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/memcached-0" event={"ID":"69802b43-d4de-4ef5-9e10-9405562de3e7","Type":"ContainerStarted","Data":"5cb3ac3c117c6db88e2e379530af05ae8b64cf5e9a10e69445b954e4a6269a1b"} Dec 06 15:49:09 crc kubenswrapper[5003]: I1206 15:49:09.040299 5003 generic.go:334] "Generic (PLEG): container finished" podID="6b45e98d-ae85-45e0-b565-11db9addcad3" containerID="c5094b2f8afe0c8541ddfda3379dcc608d2a6e63c745cb7108302879122eecdf" exitCode=0 Dec 06 15:49:09 crc kubenswrapper[5003]: I1206 15:49:09.040421 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-0" event={"ID":"6b45e98d-ae85-45e0-b565-11db9addcad3","Type":"ContainerDied","Data":"c5094b2f8afe0c8541ddfda3379dcc608d2a6e63c745cb7108302879122eecdf"} Dec 06 15:49:09 crc kubenswrapper[5003]: I1206 15:49:09.044165 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-2" event={"ID":"6433e1ae-768c-42ba-b961-4bd7bfba8701","Type":"ContainerDied","Data":"48f91b4bca2bb0bb84182a7178f812d724c21803359edfb6b556c9946bec902e"} Dec 06 15:49:09 crc kubenswrapper[5003]: I1206 15:49:09.045525 5003 generic.go:334] "Generic (PLEG): container finished" podID="6433e1ae-768c-42ba-b961-4bd7bfba8701" containerID="48f91b4bca2bb0bb84182a7178f812d724c21803359edfb6b556c9946bec902e" exitCode=0 Dec 06 15:49:09 crc kubenswrapper[5003]: I1206 15:49:09.049935 5003 generic.go:334] "Generic (PLEG): container finished" podID="72422d98-0d90-4087-8aae-e78370f932b1" containerID="41205e827227e20713cac2571c5fdf50dcc7dd7db3330f432ed8a4526fe4ba58" exitCode=0 Dec 06 15:49:09 crc kubenswrapper[5003]: I1206 15:49:09.050013 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-1" event={"ID":"72422d98-0d90-4087-8aae-e78370f932b1","Type":"ContainerDied","Data":"41205e827227e20713cac2571c5fdf50dcc7dd7db3330f432ed8a4526fe4ba58"} Dec 06 15:49:09 crc kubenswrapper[5003]: I1206 15:49:09.289275 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-zz7c5"] Dec 06 15:49:09 crc kubenswrapper[5003]: I1206 15:49:09.295849 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-zz7c5" Dec 06 15:49:09 crc kubenswrapper[5003]: I1206 15:49:09.306142 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-index-dockercfg-nz652" Dec 06 15:49:09 crc kubenswrapper[5003]: I1206 15:49:09.307643 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5ct9\" (UniqueName: \"kubernetes.io/projected/5a66fbb7-47ca-491c-aed8-56580ee677c9-kube-api-access-k5ct9\") pod \"rabbitmq-cluster-operator-index-zz7c5\" (UID: \"5a66fbb7-47ca-491c-aed8-56580ee677c9\") " pod="openstack-operators/rabbitmq-cluster-operator-index-zz7c5" Dec 06 15:49:09 crc kubenswrapper[5003]: I1206 15:49:09.331565 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-zz7c5"] Dec 06 15:49:09 crc kubenswrapper[5003]: I1206 15:49:09.408701 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5ct9\" (UniqueName: \"kubernetes.io/projected/5a66fbb7-47ca-491c-aed8-56580ee677c9-kube-api-access-k5ct9\") pod \"rabbitmq-cluster-operator-index-zz7c5\" (UID: \"5a66fbb7-47ca-491c-aed8-56580ee677c9\") " pod="openstack-operators/rabbitmq-cluster-operator-index-zz7c5" Dec 06 15:49:09 crc kubenswrapper[5003]: I1206 15:49:09.445306 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5ct9\" (UniqueName: \"kubernetes.io/projected/5a66fbb7-47ca-491c-aed8-56580ee677c9-kube-api-access-k5ct9\") pod \"rabbitmq-cluster-operator-index-zz7c5\" (UID: \"5a66fbb7-47ca-491c-aed8-56580ee677c9\") " pod="openstack-operators/rabbitmq-cluster-operator-index-zz7c5" Dec 06 15:49:09 crc kubenswrapper[5003]: I1206 15:49:09.615315 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-zz7c5" Dec 06 15:49:10 crc kubenswrapper[5003]: I1206 15:49:10.611797 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-zz7c5"] Dec 06 15:49:11 crc kubenswrapper[5003]: I1206 15:49:11.064761 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-0" event={"ID":"6b45e98d-ae85-45e0-b565-11db9addcad3","Type":"ContainerStarted","Data":"133fc0f206a2ac359eedda713017b70e001923ecd02badee3cfce4e2f77bbc57"} Dec 06 15:49:11 crc kubenswrapper[5003]: I1206 15:49:11.067369 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/memcached-0" event={"ID":"69802b43-d4de-4ef5-9e10-9405562de3e7","Type":"ContainerStarted","Data":"f4a85997302246bacff4774e7dabbc039ee9f5e10350d2603860ea80dc4d7e84"} Dec 06 15:49:11 crc kubenswrapper[5003]: I1206 15:49:11.067545 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="horizon-kuttl-tests/memcached-0" Dec 06 15:49:11 crc kubenswrapper[5003]: I1206 15:49:11.069269 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-2" event={"ID":"6433e1ae-768c-42ba-b961-4bd7bfba8701","Type":"ContainerStarted","Data":"3be2e48606ac441812b02fb904997ab6234188ea0ee8feeb3fc904dacb84da5f"} Dec 06 15:49:11 crc kubenswrapper[5003]: I1206 15:49:11.070996 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-1" event={"ID":"72422d98-0d90-4087-8aae-e78370f932b1","Type":"ContainerStarted","Data":"4bc3c2e758d8982d595f10a53008c877c956e5fc303fbcb4c027c58fa9a1e430"} Dec 06 15:49:11 crc kubenswrapper[5003]: I1206 15:49:11.071754 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-zz7c5" event={"ID":"5a66fbb7-47ca-491c-aed8-56580ee677c9","Type":"ContainerStarted","Data":"61a579f12cf953182afa9edca627af4ad7538c207f44d47bf910ac9804ee7277"} Dec 06 15:49:11 crc kubenswrapper[5003]: I1206 15:49:11.089740 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="horizon-kuttl-tests/openstack-galera-0" podStartSLOduration=8.963764132 podStartE2EDuration="16.089718852s" podCreationTimestamp="2025-12-06 15:48:55 +0000 UTC" firstStartedPulling="2025-12-06 15:48:56.97458993 +0000 UTC m=+1015.507944311" lastFinishedPulling="2025-12-06 15:49:04.10054465 +0000 UTC m=+1022.633899031" observedRunningTime="2025-12-06 15:49:11.086008659 +0000 UTC m=+1029.619363060" watchObservedRunningTime="2025-12-06 15:49:11.089718852 +0000 UTC m=+1029.623073223" Dec 06 15:49:11 crc kubenswrapper[5003]: I1206 15:49:11.114021 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="horizon-kuttl-tests/openstack-galera-2" podStartSLOduration=9.205596696 podStartE2EDuration="16.11399228s" podCreationTimestamp="2025-12-06 15:48:55 +0000 UTC" firstStartedPulling="2025-12-06 15:48:57.235619244 +0000 UTC m=+1015.768973625" lastFinishedPulling="2025-12-06 15:49:04.144014828 +0000 UTC m=+1022.677369209" observedRunningTime="2025-12-06 15:49:11.109132967 +0000 UTC m=+1029.642487358" watchObservedRunningTime="2025-12-06 15:49:11.11399228 +0000 UTC m=+1029.647346661" Dec 06 15:49:11 crc kubenswrapper[5003]: I1206 15:49:11.141336 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="horizon-kuttl-tests/openstack-galera-1" podStartSLOduration=8.852074144 podStartE2EDuration="16.141314684s" 
podCreationTimestamp="2025-12-06 15:48:55 +0000 UTC" firstStartedPulling="2025-12-06 15:48:56.795302409 +0000 UTC m=+1015.328656790" lastFinishedPulling="2025-12-06 15:49:04.084542949 +0000 UTC m=+1022.617897330" observedRunningTime="2025-12-06 15:49:11.135403011 +0000 UTC m=+1029.668757392" watchObservedRunningTime="2025-12-06 15:49:11.141314684 +0000 UTC m=+1029.674669065" Dec 06 15:49:11 crc kubenswrapper[5003]: I1206 15:49:11.743542 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="horizon-kuttl-tests/memcached-0" podStartSLOduration=2.530287309 podStartE2EDuration="5.743519801s" podCreationTimestamp="2025-12-06 15:49:06 +0000 UTC" firstStartedPulling="2025-12-06 15:49:07.203851221 +0000 UTC m=+1025.737205602" lastFinishedPulling="2025-12-06 15:49:10.417083713 +0000 UTC m=+1028.950438094" observedRunningTime="2025-12-06 15:49:11.15930505 +0000 UTC m=+1029.692659441" watchObservedRunningTime="2025-12-06 15:49:11.743519801 +0000 UTC m=+1030.276874182" Dec 06 15:49:13 crc kubenswrapper[5003]: I1206 15:49:13.656595 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-zz7c5"] Dec 06 15:49:14 crc kubenswrapper[5003]: I1206 15:49:14.116743 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-zz7c5" event={"ID":"5a66fbb7-47ca-491c-aed8-56580ee677c9","Type":"ContainerStarted","Data":"85d8fbcbf5e8c27a4fde8769af715398a347076c69a7ca421582597694d1230f"} Dec 06 15:49:14 crc kubenswrapper[5003]: I1206 15:49:14.136688 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-index-zz7c5" podStartSLOduration=2.401186805 podStartE2EDuration="5.136665218s" podCreationTimestamp="2025-12-06 15:49:09 +0000 UTC" firstStartedPulling="2025-12-06 15:49:10.632762527 +0000 UTC m=+1029.166116908" lastFinishedPulling="2025-12-06 15:49:13.36824094 +0000 UTC m=+1031.901595321" observedRunningTime="2025-12-06 15:49:14.135179118 +0000 UTC m=+1032.668533509" watchObservedRunningTime="2025-12-06 15:49:14.136665218 +0000 UTC m=+1032.670019599" Dec 06 15:49:14 crc kubenswrapper[5003]: I1206 15:49:14.270900 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-cvt2f"] Dec 06 15:49:14 crc kubenswrapper[5003]: I1206 15:49:14.271954 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-cvt2f" Dec 06 15:49:14 crc kubenswrapper[5003]: I1206 15:49:14.277502 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-cvt2f"] Dec 06 15:49:14 crc kubenswrapper[5003]: I1206 15:49:14.389569 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tz4b2\" (UniqueName: \"kubernetes.io/projected/9527a213-4c9a-4477-9876-1b8572119c9a-kube-api-access-tz4b2\") pod \"rabbitmq-cluster-operator-index-cvt2f\" (UID: \"9527a213-4c9a-4477-9876-1b8572119c9a\") " pod="openstack-operators/rabbitmq-cluster-operator-index-cvt2f" Dec 06 15:49:14 crc kubenswrapper[5003]: I1206 15:49:14.491587 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tz4b2\" (UniqueName: \"kubernetes.io/projected/9527a213-4c9a-4477-9876-1b8572119c9a-kube-api-access-tz4b2\") pod \"rabbitmq-cluster-operator-index-cvt2f\" (UID: \"9527a213-4c9a-4477-9876-1b8572119c9a\") " pod="openstack-operators/rabbitmq-cluster-operator-index-cvt2f" Dec 06 15:49:14 crc kubenswrapper[5003]: I1206 15:49:14.513801 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tz4b2\" (UniqueName: \"kubernetes.io/projected/9527a213-4c9a-4477-9876-1b8572119c9a-kube-api-access-tz4b2\") pod \"rabbitmq-cluster-operator-index-cvt2f\" (UID: \"9527a213-4c9a-4477-9876-1b8572119c9a\") " pod="openstack-operators/rabbitmq-cluster-operator-index-cvt2f" Dec 06 15:49:14 crc kubenswrapper[5003]: I1206 15:49:14.647562 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-cvt2f" Dec 06 15:49:15 crc kubenswrapper[5003]: I1206 15:49:15.122062 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/rabbitmq-cluster-operator-index-zz7c5" podUID="5a66fbb7-47ca-491c-aed8-56580ee677c9" containerName="registry-server" containerID="cri-o://85d8fbcbf5e8c27a4fde8769af715398a347076c69a7ca421582597694d1230f" gracePeriod=2 Dec 06 15:49:15 crc kubenswrapper[5003]: I1206 15:49:15.137567 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-cvt2f"] Dec 06 15:49:15 crc kubenswrapper[5003]: W1206 15:49:15.139198 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9527a213_4c9a_4477_9876_1b8572119c9a.slice/crio-1d1de4f3c810c96c4ed6379f49983f08a077bcd1fe0f7986e0a481ca023bbbfe WatchSource:0}: Error finding container 1d1de4f3c810c96c4ed6379f49983f08a077bcd1fe0f7986e0a481ca023bbbfe: Status 404 returned error can't find the container with id 1d1de4f3c810c96c4ed6379f49983f08a077bcd1fe0f7986e0a481ca023bbbfe Dec 06 15:49:16 crc kubenswrapper[5003]: I1206 15:49:16.175816 5003 generic.go:334] "Generic (PLEG): container finished" podID="5a66fbb7-47ca-491c-aed8-56580ee677c9" containerID="85d8fbcbf5e8c27a4fde8769af715398a347076c69a7ca421582597694d1230f" exitCode=0 Dec 06 15:49:16 crc kubenswrapper[5003]: I1206 15:49:16.176716 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-zz7c5" event={"ID":"5a66fbb7-47ca-491c-aed8-56580ee677c9","Type":"ContainerDied","Data":"85d8fbcbf5e8c27a4fde8769af715398a347076c69a7ca421582597694d1230f"} Dec 06 15:49:16 crc kubenswrapper[5003]: I1206 15:49:16.183761 5003 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-cvt2f" event={"ID":"9527a213-4c9a-4477-9876-1b8572119c9a","Type":"ContainerStarted","Data":"1d1de4f3c810c96c4ed6379f49983f08a077bcd1fe0f7986e0a481ca023bbbfe"} Dec 06 15:49:16 crc kubenswrapper[5003]: I1206 15:49:16.508074 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="horizon-kuttl-tests/openstack-galera-0" Dec 06 15:49:16 crc kubenswrapper[5003]: I1206 15:49:16.508143 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="horizon-kuttl-tests/openstack-galera-0" Dec 06 15:49:16 crc kubenswrapper[5003]: I1206 15:49:16.516932 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="horizon-kuttl-tests/openstack-galera-1" Dec 06 15:49:16 crc kubenswrapper[5003]: I1206 15:49:16.516996 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="horizon-kuttl-tests/openstack-galera-1" Dec 06 15:49:16 crc kubenswrapper[5003]: I1206 15:49:16.758646 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="horizon-kuttl-tests/memcached-0" Dec 06 15:49:16 crc kubenswrapper[5003]: I1206 15:49:16.831707 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="horizon-kuttl-tests/openstack-galera-2" Dec 06 15:49:16 crc kubenswrapper[5003]: I1206 15:49:16.831991 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="horizon-kuttl-tests/openstack-galera-2" Dec 06 15:49:17 crc kubenswrapper[5003]: I1206 15:49:17.668726 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-zz7c5" Dec 06 15:49:17 crc kubenswrapper[5003]: I1206 15:49:17.841393 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k5ct9\" (UniqueName: \"kubernetes.io/projected/5a66fbb7-47ca-491c-aed8-56580ee677c9-kube-api-access-k5ct9\") pod \"5a66fbb7-47ca-491c-aed8-56580ee677c9\" (UID: \"5a66fbb7-47ca-491c-aed8-56580ee677c9\") " Dec 06 15:49:17 crc kubenswrapper[5003]: I1206 15:49:17.848673 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a66fbb7-47ca-491c-aed8-56580ee677c9-kube-api-access-k5ct9" (OuterVolumeSpecName: "kube-api-access-k5ct9") pod "5a66fbb7-47ca-491c-aed8-56580ee677c9" (UID: "5a66fbb7-47ca-491c-aed8-56580ee677c9"). InnerVolumeSpecName "kube-api-access-k5ct9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:49:17 crc kubenswrapper[5003]: I1206 15:49:17.942666 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k5ct9\" (UniqueName: \"kubernetes.io/projected/5a66fbb7-47ca-491c-aed8-56580ee677c9-kube-api-access-k5ct9\") on node \"crc\" DevicePath \"\"" Dec 06 15:49:18 crc kubenswrapper[5003]: I1206 15:49:18.198602 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-cvt2f" event={"ID":"9527a213-4c9a-4477-9876-1b8572119c9a","Type":"ContainerStarted","Data":"2c08ff6901571d98aeae0c6c15cd5e31caab7c4427674a10ce65f40b92e74d62"} Dec 06 15:49:18 crc kubenswrapper[5003]: I1206 15:49:18.200003 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-zz7c5" event={"ID":"5a66fbb7-47ca-491c-aed8-56580ee677c9","Type":"ContainerDied","Data":"61a579f12cf953182afa9edca627af4ad7538c207f44d47bf910ac9804ee7277"} Dec 06 15:49:18 crc kubenswrapper[5003]: I1206 15:49:18.200065 5003 scope.go:117] "RemoveContainer" containerID="85d8fbcbf5e8c27a4fde8769af715398a347076c69a7ca421582597694d1230f" Dec 06 15:49:18 crc kubenswrapper[5003]: I1206 15:49:18.200065 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-zz7c5" Dec 06 15:49:18 crc kubenswrapper[5003]: I1206 15:49:18.218903 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-index-cvt2f" podStartSLOduration=1.94610872 podStartE2EDuration="4.2188796s" podCreationTimestamp="2025-12-06 15:49:14 +0000 UTC" firstStartedPulling="2025-12-06 15:49:15.143701734 +0000 UTC m=+1033.677056115" lastFinishedPulling="2025-12-06 15:49:17.416472614 +0000 UTC m=+1035.949826995" observedRunningTime="2025-12-06 15:49:18.215754504 +0000 UTC m=+1036.749108965" watchObservedRunningTime="2025-12-06 15:49:18.2188796 +0000 UTC m=+1036.752233981" Dec 06 15:49:18 crc kubenswrapper[5003]: I1206 15:49:18.232143 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-zz7c5"] Dec 06 15:49:18 crc kubenswrapper[5003]: I1206 15:49:18.236583 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-zz7c5"] Dec 06 15:49:19 crc kubenswrapper[5003]: I1206 15:49:19.726311 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a66fbb7-47ca-491c-aed8-56580ee677c9" path="/var/lib/kubelet/pods/5a66fbb7-47ca-491c-aed8-56580ee677c9/volumes" Dec 06 15:49:21 crc kubenswrapper[5003]: I1206 15:49:21.517308 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="horizon-kuttl-tests/openstack-galera-2" Dec 06 15:49:21 crc kubenswrapper[5003]: I1206 15:49:21.578142 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="horizon-kuttl-tests/openstack-galera-2" Dec 06 15:49:24 crc kubenswrapper[5003]: I1206 15:49:24.649381 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/rabbitmq-cluster-operator-index-cvt2f" Dec 06 15:49:24 crc kubenswrapper[5003]: I1206 15:49:24.650221 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/rabbitmq-cluster-operator-index-cvt2f" Dec 06 15:49:24 crc kubenswrapper[5003]: I1206 15:49:24.681667 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openstack-operators/rabbitmq-cluster-operator-index-cvt2f" Dec 06 15:49:25 crc kubenswrapper[5003]: I1206 15:49:25.268496 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/rabbitmq-cluster-operator-index-cvt2f" Dec 06 15:49:26 crc kubenswrapper[5003]: I1206 15:49:26.908639 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="horizon-kuttl-tests/openstack-galera-2" podUID="6433e1ae-768c-42ba-b961-4bd7bfba8701" containerName="galera" probeResult="failure" output=< Dec 06 15:49:26 crc kubenswrapper[5003]: wsrep_local_state_comment (Donor/Desynced) differs from Synced Dec 06 15:49:26 crc kubenswrapper[5003]: > Dec 06 15:49:32 crc kubenswrapper[5003]: I1206 15:49:32.914257 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590fzkbm"] Dec 06 15:49:32 crc kubenswrapper[5003]: E1206 15:49:32.914959 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a66fbb7-47ca-491c-aed8-56580ee677c9" containerName="registry-server" Dec 06 15:49:32 crc kubenswrapper[5003]: I1206 15:49:32.914979 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a66fbb7-47ca-491c-aed8-56580ee677c9" containerName="registry-server" Dec 06 15:49:32 crc kubenswrapper[5003]: I1206 15:49:32.915136 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a66fbb7-47ca-491c-aed8-56580ee677c9" containerName="registry-server" Dec 06 15:49:32 crc kubenswrapper[5003]: I1206 15:49:32.916217 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590fzkbm" Dec 06 15:49:32 crc kubenswrapper[5003]: I1206 15:49:32.919568 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-kqjk5" Dec 06 15:49:32 crc kubenswrapper[5003]: I1206 15:49:32.927096 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590fzkbm"] Dec 06 15:49:33 crc kubenswrapper[5003]: I1206 15:49:33.059582 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/20390cae-6054-4766-8ff4-48402fd00916-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590fzkbm\" (UID: \"20390cae-6054-4766-8ff4-48402fd00916\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590fzkbm" Dec 06 15:49:33 crc kubenswrapper[5003]: I1206 15:49:33.059688 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwtgl\" (UniqueName: \"kubernetes.io/projected/20390cae-6054-4766-8ff4-48402fd00916-kube-api-access-wwtgl\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590fzkbm\" (UID: \"20390cae-6054-4766-8ff4-48402fd00916\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590fzkbm" Dec 06 15:49:33 crc kubenswrapper[5003]: I1206 15:49:33.059741 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/20390cae-6054-4766-8ff4-48402fd00916-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590fzkbm\" (UID: \"20390cae-6054-4766-8ff4-48402fd00916\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590fzkbm" Dec 06 
15:49:33 crc kubenswrapper[5003]: I1206 15:49:33.160805 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/20390cae-6054-4766-8ff4-48402fd00916-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590fzkbm\" (UID: \"20390cae-6054-4766-8ff4-48402fd00916\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590fzkbm" Dec 06 15:49:33 crc kubenswrapper[5003]: I1206 15:49:33.160900 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwtgl\" (UniqueName: \"kubernetes.io/projected/20390cae-6054-4766-8ff4-48402fd00916-kube-api-access-wwtgl\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590fzkbm\" (UID: \"20390cae-6054-4766-8ff4-48402fd00916\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590fzkbm" Dec 06 15:49:33 crc kubenswrapper[5003]: I1206 15:49:33.160966 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/20390cae-6054-4766-8ff4-48402fd00916-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590fzkbm\" (UID: \"20390cae-6054-4766-8ff4-48402fd00916\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590fzkbm" Dec 06 15:49:33 crc kubenswrapper[5003]: I1206 15:49:33.161476 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/20390cae-6054-4766-8ff4-48402fd00916-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590fzkbm\" (UID: \"20390cae-6054-4766-8ff4-48402fd00916\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590fzkbm" Dec 06 15:49:33 crc kubenswrapper[5003]: I1206 15:49:33.161573 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/20390cae-6054-4766-8ff4-48402fd00916-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590fzkbm\" (UID: \"20390cae-6054-4766-8ff4-48402fd00916\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590fzkbm" Dec 06 15:49:33 crc kubenswrapper[5003]: I1206 15:49:33.184602 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwtgl\" (UniqueName: \"kubernetes.io/projected/20390cae-6054-4766-8ff4-48402fd00916-kube-api-access-wwtgl\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590fzkbm\" (UID: \"20390cae-6054-4766-8ff4-48402fd00916\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590fzkbm" Dec 06 15:49:33 crc kubenswrapper[5003]: I1206 15:49:33.251770 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590fzkbm" Dec 06 15:49:33 crc kubenswrapper[5003]: I1206 15:49:33.700668 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590fzkbm"] Dec 06 15:49:33 crc kubenswrapper[5003]: W1206 15:49:33.710453 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod20390cae_6054_4766_8ff4_48402fd00916.slice/crio-85950e83b69cb0039e4d6b8f96d5a894646fc9d750ddcffa17f28e5c24a5e0a0 WatchSource:0}: Error finding container 85950e83b69cb0039e4d6b8f96d5a894646fc9d750ddcffa17f28e5c24a5e0a0: Status 404 returned error can't find the container with id 85950e83b69cb0039e4d6b8f96d5a894646fc9d750ddcffa17f28e5c24a5e0a0 Dec 06 15:49:34 crc kubenswrapper[5003]: I1206 15:49:34.303258 5003 generic.go:334] "Generic (PLEG): container finished" podID="20390cae-6054-4766-8ff4-48402fd00916" containerID="b483224610044675bd95a22c414108875c122a9130077a4962e73c2bcf9f630b" exitCode=0 Dec 06 15:49:34 crc kubenswrapper[5003]: I1206 15:49:34.303324 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590fzkbm" event={"ID":"20390cae-6054-4766-8ff4-48402fd00916","Type":"ContainerDied","Data":"b483224610044675bd95a22c414108875c122a9130077a4962e73c2bcf9f630b"} Dec 06 15:49:34 crc kubenswrapper[5003]: I1206 15:49:34.303363 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590fzkbm" event={"ID":"20390cae-6054-4766-8ff4-48402fd00916","Type":"ContainerStarted","Data":"85950e83b69cb0039e4d6b8f96d5a894646fc9d750ddcffa17f28e5c24a5e0a0"} Dec 06 15:49:34 crc kubenswrapper[5003]: I1206 15:49:34.444428 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="horizon-kuttl-tests/openstack-galera-1" Dec 06 15:49:34 crc kubenswrapper[5003]: I1206 15:49:34.523755 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="horizon-kuttl-tests/openstack-galera-1" Dec 06 15:49:36 crc kubenswrapper[5003]: I1206 15:49:36.319141 5003 generic.go:334] "Generic (PLEG): container finished" podID="20390cae-6054-4766-8ff4-48402fd00916" containerID="e5ac28d7096c779da08aa683ede92fe0eae4d2423f15764452c422549471be10" exitCode=0 Dec 06 15:49:36 crc kubenswrapper[5003]: I1206 15:49:36.319261 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590fzkbm" event={"ID":"20390cae-6054-4766-8ff4-48402fd00916","Type":"ContainerDied","Data":"e5ac28d7096c779da08aa683ede92fe0eae4d2423f15764452c422549471be10"} Dec 06 15:49:37 crc kubenswrapper[5003]: I1206 15:49:37.329188 5003 generic.go:334] "Generic (PLEG): container finished" podID="20390cae-6054-4766-8ff4-48402fd00916" containerID="2000dcc5d69c8b5474589b13e6409f895e4aa8079dbf5c1e6c486a8395c2d4a7" exitCode=0 Dec 06 15:49:37 crc kubenswrapper[5003]: I1206 15:49:37.329252 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590fzkbm" event={"ID":"20390cae-6054-4766-8ff4-48402fd00916","Type":"ContainerDied","Data":"2000dcc5d69c8b5474589b13e6409f895e4aa8079dbf5c1e6c486a8395c2d4a7"} Dec 06 15:49:37 crc kubenswrapper[5003]: I1206 15:49:37.527445 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="started" pod="horizon-kuttl-tests/openstack-galera-0" Dec 06 15:49:37 crc kubenswrapper[5003]: I1206 15:49:37.608621 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="horizon-kuttl-tests/openstack-galera-0" Dec 06 15:49:38 crc kubenswrapper[5003]: I1206 15:49:38.707598 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590fzkbm" Dec 06 15:49:38 crc kubenswrapper[5003]: I1206 15:49:38.750814 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/20390cae-6054-4766-8ff4-48402fd00916-util\") pod \"20390cae-6054-4766-8ff4-48402fd00916\" (UID: \"20390cae-6054-4766-8ff4-48402fd00916\") " Dec 06 15:49:38 crc kubenswrapper[5003]: I1206 15:49:38.750987 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wwtgl\" (UniqueName: \"kubernetes.io/projected/20390cae-6054-4766-8ff4-48402fd00916-kube-api-access-wwtgl\") pod \"20390cae-6054-4766-8ff4-48402fd00916\" (UID: \"20390cae-6054-4766-8ff4-48402fd00916\") " Dec 06 15:49:38 crc kubenswrapper[5003]: I1206 15:49:38.751165 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/20390cae-6054-4766-8ff4-48402fd00916-bundle\") pod \"20390cae-6054-4766-8ff4-48402fd00916\" (UID: \"20390cae-6054-4766-8ff4-48402fd00916\") " Dec 06 15:49:38 crc kubenswrapper[5003]: I1206 15:49:38.751652 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20390cae-6054-4766-8ff4-48402fd00916-bundle" (OuterVolumeSpecName: "bundle") pod "20390cae-6054-4766-8ff4-48402fd00916" (UID: "20390cae-6054-4766-8ff4-48402fd00916"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:49:38 crc kubenswrapper[5003]: I1206 15:49:38.756001 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20390cae-6054-4766-8ff4-48402fd00916-kube-api-access-wwtgl" (OuterVolumeSpecName: "kube-api-access-wwtgl") pod "20390cae-6054-4766-8ff4-48402fd00916" (UID: "20390cae-6054-4766-8ff4-48402fd00916"). InnerVolumeSpecName "kube-api-access-wwtgl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:49:38 crc kubenswrapper[5003]: I1206 15:49:38.763630 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20390cae-6054-4766-8ff4-48402fd00916-util" (OuterVolumeSpecName: "util") pod "20390cae-6054-4766-8ff4-48402fd00916" (UID: "20390cae-6054-4766-8ff4-48402fd00916"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:49:38 crc kubenswrapper[5003]: I1206 15:49:38.852419 5003 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/20390cae-6054-4766-8ff4-48402fd00916-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 15:49:38 crc kubenswrapper[5003]: I1206 15:49:38.852461 5003 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/20390cae-6054-4766-8ff4-48402fd00916-util\") on node \"crc\" DevicePath \"\"" Dec 06 15:49:38 crc kubenswrapper[5003]: I1206 15:49:38.852471 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wwtgl\" (UniqueName: \"kubernetes.io/projected/20390cae-6054-4766-8ff4-48402fd00916-kube-api-access-wwtgl\") on node \"crc\" DevicePath \"\"" Dec 06 15:49:39 crc kubenswrapper[5003]: I1206 15:49:39.344348 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590fzkbm" event={"ID":"20390cae-6054-4766-8ff4-48402fd00916","Type":"ContainerDied","Data":"85950e83b69cb0039e4d6b8f96d5a894646fc9d750ddcffa17f28e5c24a5e0a0"} Dec 06 15:49:39 crc kubenswrapper[5003]: I1206 15:49:39.344875 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="85950e83b69cb0039e4d6b8f96d5a894646fc9d750ddcffa17f28e5c24a5e0a0" Dec 06 15:49:39 crc kubenswrapper[5003]: I1206 15:49:39.344450 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590fzkbm" Dec 06 15:49:46 crc kubenswrapper[5003]: I1206 15:49:46.342460 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-gbbml"] Dec 06 15:49:46 crc kubenswrapper[5003]: E1206 15:49:46.343359 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20390cae-6054-4766-8ff4-48402fd00916" containerName="extract" Dec 06 15:49:46 crc kubenswrapper[5003]: I1206 15:49:46.343378 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="20390cae-6054-4766-8ff4-48402fd00916" containerName="extract" Dec 06 15:49:46 crc kubenswrapper[5003]: E1206 15:49:46.343391 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20390cae-6054-4766-8ff4-48402fd00916" containerName="util" Dec 06 15:49:46 crc kubenswrapper[5003]: I1206 15:49:46.343398 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="20390cae-6054-4766-8ff4-48402fd00916" containerName="util" Dec 06 15:49:46 crc kubenswrapper[5003]: E1206 15:49:46.343421 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20390cae-6054-4766-8ff4-48402fd00916" containerName="pull" Dec 06 15:49:46 crc kubenswrapper[5003]: I1206 15:49:46.343430 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="20390cae-6054-4766-8ff4-48402fd00916" containerName="pull" Dec 06 15:49:46 crc kubenswrapper[5003]: I1206 15:49:46.343592 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="20390cae-6054-4766-8ff4-48402fd00916" containerName="extract" Dec 06 15:49:46 crc kubenswrapper[5003]: I1206 15:49:46.344218 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-gbbml" Dec 06 15:49:46 crc kubenswrapper[5003]: I1206 15:49:46.347045 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-dockercfg-76lgw" Dec 06 15:49:46 crc kubenswrapper[5003]: I1206 15:49:46.355442 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-gbbml"] Dec 06 15:49:46 crc kubenswrapper[5003]: I1206 15:49:46.473177 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxdvq\" (UniqueName: \"kubernetes.io/projected/b87b54c4-3c22-4d05-b500-14a1cc8e99bb-kube-api-access-zxdvq\") pod \"rabbitmq-cluster-operator-779fc9694b-gbbml\" (UID: \"b87b54c4-3c22-4d05-b500-14a1cc8e99bb\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-gbbml" Dec 06 15:49:46 crc kubenswrapper[5003]: I1206 15:49:46.575156 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxdvq\" (UniqueName: \"kubernetes.io/projected/b87b54c4-3c22-4d05-b500-14a1cc8e99bb-kube-api-access-zxdvq\") pod \"rabbitmq-cluster-operator-779fc9694b-gbbml\" (UID: \"b87b54c4-3c22-4d05-b500-14a1cc8e99bb\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-gbbml" Dec 06 15:49:46 crc kubenswrapper[5003]: I1206 15:49:46.601788 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxdvq\" (UniqueName: \"kubernetes.io/projected/b87b54c4-3c22-4d05-b500-14a1cc8e99bb-kube-api-access-zxdvq\") pod \"rabbitmq-cluster-operator-779fc9694b-gbbml\" (UID: \"b87b54c4-3c22-4d05-b500-14a1cc8e99bb\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-gbbml" Dec 06 15:49:46 crc kubenswrapper[5003]: I1206 15:49:46.667058 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-gbbml" Dec 06 15:49:47 crc kubenswrapper[5003]: I1206 15:49:47.081367 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-gbbml"] Dec 06 15:49:47 crc kubenswrapper[5003]: W1206 15:49:47.087181 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podb87b54c4_3c22_4d05_b500_14a1cc8e99bb.slice/crio-1c27f2304d21d1006d83c9089945b2d5a09602f99bfa434e1f43cbaafe25a696 WatchSource:0}: Error finding container 1c27f2304d21d1006d83c9089945b2d5a09602f99bfa434e1f43cbaafe25a696: Status 404 returned error can't find the container with id 1c27f2304d21d1006d83c9089945b2d5a09602f99bfa434e1f43cbaafe25a696 Dec 06 15:49:47 crc kubenswrapper[5003]: I1206 15:49:47.399002 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-gbbml" event={"ID":"b87b54c4-3c22-4d05-b500-14a1cc8e99bb","Type":"ContainerStarted","Data":"1c27f2304d21d1006d83c9089945b2d5a09602f99bfa434e1f43cbaafe25a696"} Dec 06 15:49:50 crc kubenswrapper[5003]: I1206 15:49:50.424051 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-gbbml" event={"ID":"b87b54c4-3c22-4d05-b500-14a1cc8e99bb","Type":"ContainerStarted","Data":"6e72a8f902d950826f857587b6917fc5c30e9e42ade348f050a329d8ae50dddb"} Dec 06 15:49:50 crc kubenswrapper[5003]: I1206 15:49:50.449301 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-gbbml" podStartSLOduration=1.3541090740000001 podStartE2EDuration="4.449283607s" podCreationTimestamp="2025-12-06 15:49:46 +0000 UTC" firstStartedPulling="2025-12-06 15:49:47.090207538 +0000 UTC m=+1065.623561909" lastFinishedPulling="2025-12-06 15:49:50.185382061 +0000 UTC m=+1068.718736442" observedRunningTime="2025-12-06 15:49:50.447091807 +0000 UTC m=+1068.980446198" watchObservedRunningTime="2025-12-06 15:49:50.449283607 +0000 UTC m=+1068.982637988" Dec 06 15:49:54 crc kubenswrapper[5003]: I1206 15:49:54.681302 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/rabbitmq-server-0"] Dec 06 15:49:54 crc kubenswrapper[5003]: I1206 15:49:54.682734 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/rabbitmq-server-0" Dec 06 15:49:54 crc kubenswrapper[5003]: I1206 15:49:54.685886 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"horizon-kuttl-tests"/"rabbitmq-server-conf" Dec 06 15:49:54 crc kubenswrapper[5003]: I1206 15:49:54.685902 5003 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"rabbitmq-server-dockercfg-hnnxj" Dec 06 15:49:54 crc kubenswrapper[5003]: I1206 15:49:54.685943 5003 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"rabbitmq-erlang-cookie" Dec 06 15:49:54 crc kubenswrapper[5003]: I1206 15:49:54.685983 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"horizon-kuttl-tests"/"rabbitmq-plugins-conf" Dec 06 15:49:54 crc kubenswrapper[5003]: I1206 15:49:54.686977 5003 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"rabbitmq-default-user" Dec 06 15:49:54 crc kubenswrapper[5003]: I1206 15:49:54.699110 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/rabbitmq-server-0"] Dec 06 15:49:54 crc kubenswrapper[5003]: I1206 15:49:54.794422 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/27124a81-a0ad-4bc6-ad89-d2f5738570bc-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Dec 06 15:49:54 crc kubenswrapper[5003]: I1206 15:49:54.794476 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/27124a81-a0ad-4bc6-ad89-d2f5738570bc-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Dec 06 15:49:54 crc kubenswrapper[5003]: I1206 15:49:54.794515 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/27124a81-a0ad-4bc6-ad89-d2f5738570bc-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Dec 06 15:49:54 crc kubenswrapper[5003]: I1206 15:49:54.794582 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-fc028333-0fe0-4309-89b9-caaa4c0882da\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fc028333-0fe0-4309-89b9-caaa4c0882da\") pod \"rabbitmq-server-0\" (UID: \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Dec 06 15:49:54 crc kubenswrapper[5003]: I1206 15:49:54.794660 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/27124a81-a0ad-4bc6-ad89-d2f5738570bc-pod-info\") pod \"rabbitmq-server-0\" (UID: \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Dec 06 15:49:54 crc kubenswrapper[5003]: I1206 15:49:54.794701 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/27124a81-a0ad-4bc6-ad89-d2f5738570bc-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\") " 
pod="horizon-kuttl-tests/rabbitmq-server-0" Dec 06 15:49:54 crc kubenswrapper[5003]: I1206 15:49:54.794733 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vk5tk\" (UniqueName: \"kubernetes.io/projected/27124a81-a0ad-4bc6-ad89-d2f5738570bc-kube-api-access-vk5tk\") pod \"rabbitmq-server-0\" (UID: \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Dec 06 15:49:54 crc kubenswrapper[5003]: I1206 15:49:54.794781 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/27124a81-a0ad-4bc6-ad89-d2f5738570bc-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Dec 06 15:49:54 crc kubenswrapper[5003]: I1206 15:49:54.895638 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vk5tk\" (UniqueName: \"kubernetes.io/projected/27124a81-a0ad-4bc6-ad89-d2f5738570bc-kube-api-access-vk5tk\") pod \"rabbitmq-server-0\" (UID: \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Dec 06 15:49:54 crc kubenswrapper[5003]: I1206 15:49:54.895709 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/27124a81-a0ad-4bc6-ad89-d2f5738570bc-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Dec 06 15:49:54 crc kubenswrapper[5003]: I1206 15:49:54.895756 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/27124a81-a0ad-4bc6-ad89-d2f5738570bc-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Dec 06 15:49:54 crc kubenswrapper[5003]: I1206 15:49:54.895775 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/27124a81-a0ad-4bc6-ad89-d2f5738570bc-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Dec 06 15:49:54 crc kubenswrapper[5003]: I1206 15:49:54.895795 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/27124a81-a0ad-4bc6-ad89-d2f5738570bc-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Dec 06 15:49:54 crc kubenswrapper[5003]: I1206 15:49:54.895824 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-fc028333-0fe0-4309-89b9-caaa4c0882da\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fc028333-0fe0-4309-89b9-caaa4c0882da\") pod \"rabbitmq-server-0\" (UID: \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Dec 06 15:49:54 crc kubenswrapper[5003]: I1206 15:49:54.895915 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/27124a81-a0ad-4bc6-ad89-d2f5738570bc-pod-info\") pod \"rabbitmq-server-0\" (UID: \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Dec 
06 15:49:54 crc kubenswrapper[5003]: I1206 15:49:54.895939 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/27124a81-a0ad-4bc6-ad89-d2f5738570bc-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Dec 06 15:49:54 crc kubenswrapper[5003]: I1206 15:49:54.897704 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/27124a81-a0ad-4bc6-ad89-d2f5738570bc-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Dec 06 15:49:54 crc kubenswrapper[5003]: I1206 15:49:54.897710 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/27124a81-a0ad-4bc6-ad89-d2f5738570bc-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Dec 06 15:49:54 crc kubenswrapper[5003]: I1206 15:49:54.898610 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/27124a81-a0ad-4bc6-ad89-d2f5738570bc-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Dec 06 15:49:54 crc kubenswrapper[5003]: I1206 15:49:54.899620 5003 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 06 15:49:54 crc kubenswrapper[5003]: I1206 15:49:54.899662 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-fc028333-0fe0-4309-89b9-caaa4c0882da\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fc028333-0fe0-4309-89b9-caaa4c0882da\") pod \"rabbitmq-server-0\" (UID: \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/2ac80498db5b1b9623ab83ff8abaa2d2b9f11e88e66a051e6932d0eb13ab954f/globalmount\"" pod="horizon-kuttl-tests/rabbitmq-server-0" Dec 06 15:49:54 crc kubenswrapper[5003]: I1206 15:49:54.903265 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/27124a81-a0ad-4bc6-ad89-d2f5738570bc-pod-info\") pod \"rabbitmq-server-0\" (UID: \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Dec 06 15:49:54 crc kubenswrapper[5003]: I1206 15:49:54.904390 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/27124a81-a0ad-4bc6-ad89-d2f5738570bc-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Dec 06 15:49:54 crc kubenswrapper[5003]: I1206 15:49:54.910378 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/27124a81-a0ad-4bc6-ad89-d2f5738570bc-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Dec 06 15:49:54 crc kubenswrapper[5003]: I1206 15:49:54.927835 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"pvc-fc028333-0fe0-4309-89b9-caaa4c0882da\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fc028333-0fe0-4309-89b9-caaa4c0882da\") pod \"rabbitmq-server-0\" (UID: \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Dec 06 15:49:54 crc kubenswrapper[5003]: I1206 15:49:54.928379 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vk5tk\" (UniqueName: \"kubernetes.io/projected/27124a81-a0ad-4bc6-ad89-d2f5738570bc-kube-api-access-vk5tk\") pod \"rabbitmq-server-0\" (UID: \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Dec 06 15:49:55 crc kubenswrapper[5003]: I1206 15:49:55.008713 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/rabbitmq-server-0" Dec 06 15:49:55 crc kubenswrapper[5003]: I1206 15:49:55.414758 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/rabbitmq-server-0"] Dec 06 15:49:55 crc kubenswrapper[5003]: I1206 15:49:55.456037 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/rabbitmq-server-0" event={"ID":"27124a81-a0ad-4bc6-ad89-d2f5738570bc","Type":"ContainerStarted","Data":"7bc373d08e7144f50ba119cac94dfd299766858f73f931d5488697052288770e"} Dec 06 15:49:56 crc kubenswrapper[5003]: I1206 15:49:56.466865 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-index-4qn9h"] Dec 06 15:49:56 crc kubenswrapper[5003]: I1206 15:49:56.467784 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-index-4qn9h" Dec 06 15:49:56 crc kubenswrapper[5003]: I1206 15:49:56.470760 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-index-dockercfg-rdk9j" Dec 06 15:49:56 crc kubenswrapper[5003]: I1206 15:49:56.475015 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-index-4qn9h"] Dec 06 15:49:56 crc kubenswrapper[5003]: I1206 15:49:56.629637 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktc25\" (UniqueName: \"kubernetes.io/projected/ff5afab4-f287-43ad-bf14-7ac8c90a52e3-kube-api-access-ktc25\") pod \"keystone-operator-index-4qn9h\" (UID: \"ff5afab4-f287-43ad-bf14-7ac8c90a52e3\") " pod="openstack-operators/keystone-operator-index-4qn9h" Dec 06 15:49:56 crc kubenswrapper[5003]: I1206 15:49:56.731193 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktc25\" (UniqueName: \"kubernetes.io/projected/ff5afab4-f287-43ad-bf14-7ac8c90a52e3-kube-api-access-ktc25\") pod \"keystone-operator-index-4qn9h\" (UID: \"ff5afab4-f287-43ad-bf14-7ac8c90a52e3\") " pod="openstack-operators/keystone-operator-index-4qn9h" Dec 06 15:49:56 crc kubenswrapper[5003]: I1206 15:49:56.751435 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktc25\" (UniqueName: \"kubernetes.io/projected/ff5afab4-f287-43ad-bf14-7ac8c90a52e3-kube-api-access-ktc25\") pod \"keystone-operator-index-4qn9h\" (UID: \"ff5afab4-f287-43ad-bf14-7ac8c90a52e3\") " pod="openstack-operators/keystone-operator-index-4qn9h" Dec 06 15:49:56 crc kubenswrapper[5003]: I1206 15:49:56.794247 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-index-4qn9h" Dec 06 15:49:57 crc kubenswrapper[5003]: I1206 15:49:57.255445 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-index-4qn9h"] Dec 06 15:49:57 crc kubenswrapper[5003]: W1206 15:49:57.275282 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podff5afab4_f287_43ad_bf14_7ac8c90a52e3.slice/crio-3891a655096b5666baa35b4f56280ad80389a779d56aaa840ffee84b29979573 WatchSource:0}: Error finding container 3891a655096b5666baa35b4f56280ad80389a779d56aaa840ffee84b29979573: Status 404 returned error can't find the container with id 3891a655096b5666baa35b4f56280ad80389a779d56aaa840ffee84b29979573 Dec 06 15:49:57 crc kubenswrapper[5003]: I1206 15:49:57.481515 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-4qn9h" event={"ID":"ff5afab4-f287-43ad-bf14-7ac8c90a52e3","Type":"ContainerStarted","Data":"3891a655096b5666baa35b4f56280ad80389a779d56aaa840ffee84b29979573"} Dec 06 15:50:03 crc kubenswrapper[5003]: I1206 15:50:03.523341 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-4qn9h" event={"ID":"ff5afab4-f287-43ad-bf14-7ac8c90a52e3","Type":"ContainerStarted","Data":"42f4134bfb546e8795f6c7a557eac50c015cd43f87e79a13c6d099d6721f0418"} Dec 06 15:50:04 crc kubenswrapper[5003]: I1206 15:50:04.533678 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/rabbitmq-server-0" event={"ID":"27124a81-a0ad-4bc6-ad89-d2f5738570bc","Type":"ContainerStarted","Data":"6e16f162181f4e0cf41ba4628b056e20a6058930d1b6b01cd8815905709818b5"} Dec 06 15:50:04 crc kubenswrapper[5003]: I1206 15:50:04.559357 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-index-4qn9h" podStartSLOduration=3.190363545 podStartE2EDuration="8.559330226s" podCreationTimestamp="2025-12-06 15:49:56 +0000 UTC" firstStartedPulling="2025-12-06 15:49:57.278052381 +0000 UTC m=+1075.811406762" lastFinishedPulling="2025-12-06 15:50:02.647019062 +0000 UTC m=+1081.180373443" observedRunningTime="2025-12-06 15:50:03.542388265 +0000 UTC m=+1082.075742656" watchObservedRunningTime="2025-12-06 15:50:04.559330226 +0000 UTC m=+1083.092684607" Dec 06 15:50:06 crc kubenswrapper[5003]: I1206 15:50:06.795345 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-index-4qn9h" Dec 06 15:50:06 crc kubenswrapper[5003]: I1206 15:50:06.795629 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/keystone-operator-index-4qn9h" Dec 06 15:50:06 crc kubenswrapper[5003]: I1206 15:50:06.824455 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/keystone-operator-index-4qn9h" Dec 06 15:50:07 crc kubenswrapper[5003]: I1206 15:50:07.578652 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-index-4qn9h" Dec 06 15:50:11 crc kubenswrapper[5003]: I1206 15:50:11.303855 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/835551ba8f27f4fd61e1b05ebed5cb285496b645cbb6fd0ac403227c856dznq"] Dec 06 15:50:11 crc kubenswrapper[5003]: I1206 15:50:11.306096 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/835551ba8f27f4fd61e1b05ebed5cb285496b645cbb6fd0ac403227c856dznq" Dec 06 15:50:11 crc kubenswrapper[5003]: I1206 15:50:11.307902 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-kqjk5" Dec 06 15:50:11 crc kubenswrapper[5003]: I1206 15:50:11.315065 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/835551ba8f27f4fd61e1b05ebed5cb285496b645cbb6fd0ac403227c856dznq"] Dec 06 15:50:11 crc kubenswrapper[5003]: I1206 15:50:11.463015 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgk6h\" (UniqueName: \"kubernetes.io/projected/03fe4942-5f11-47e3-86ec-83f19111191a-kube-api-access-mgk6h\") pod \"835551ba8f27f4fd61e1b05ebed5cb285496b645cbb6fd0ac403227c856dznq\" (UID: \"03fe4942-5f11-47e3-86ec-83f19111191a\") " pod="openstack-operators/835551ba8f27f4fd61e1b05ebed5cb285496b645cbb6fd0ac403227c856dznq" Dec 06 15:50:11 crc kubenswrapper[5003]: I1206 15:50:11.463069 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/03fe4942-5f11-47e3-86ec-83f19111191a-bundle\") pod \"835551ba8f27f4fd61e1b05ebed5cb285496b645cbb6fd0ac403227c856dznq\" (UID: \"03fe4942-5f11-47e3-86ec-83f19111191a\") " pod="openstack-operators/835551ba8f27f4fd61e1b05ebed5cb285496b645cbb6fd0ac403227c856dznq" Dec 06 15:50:11 crc kubenswrapper[5003]: I1206 15:50:11.463101 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/03fe4942-5f11-47e3-86ec-83f19111191a-util\") pod \"835551ba8f27f4fd61e1b05ebed5cb285496b645cbb6fd0ac403227c856dznq\" (UID: \"03fe4942-5f11-47e3-86ec-83f19111191a\") " pod="openstack-operators/835551ba8f27f4fd61e1b05ebed5cb285496b645cbb6fd0ac403227c856dznq" Dec 06 15:50:11 crc kubenswrapper[5003]: I1206 15:50:11.564745 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgk6h\" (UniqueName: \"kubernetes.io/projected/03fe4942-5f11-47e3-86ec-83f19111191a-kube-api-access-mgk6h\") pod \"835551ba8f27f4fd61e1b05ebed5cb285496b645cbb6fd0ac403227c856dznq\" (UID: \"03fe4942-5f11-47e3-86ec-83f19111191a\") " pod="openstack-operators/835551ba8f27f4fd61e1b05ebed5cb285496b645cbb6fd0ac403227c856dznq" Dec 06 15:50:11 crc kubenswrapper[5003]: I1206 15:50:11.564839 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/03fe4942-5f11-47e3-86ec-83f19111191a-bundle\") pod \"835551ba8f27f4fd61e1b05ebed5cb285496b645cbb6fd0ac403227c856dznq\" (UID: \"03fe4942-5f11-47e3-86ec-83f19111191a\") " pod="openstack-operators/835551ba8f27f4fd61e1b05ebed5cb285496b645cbb6fd0ac403227c856dznq" Dec 06 15:50:11 crc kubenswrapper[5003]: I1206 15:50:11.564871 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/03fe4942-5f11-47e3-86ec-83f19111191a-util\") pod \"835551ba8f27f4fd61e1b05ebed5cb285496b645cbb6fd0ac403227c856dznq\" (UID: \"03fe4942-5f11-47e3-86ec-83f19111191a\") " pod="openstack-operators/835551ba8f27f4fd61e1b05ebed5cb285496b645cbb6fd0ac403227c856dznq" Dec 06 15:50:11 crc kubenswrapper[5003]: I1206 15:50:11.565852 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/03fe4942-5f11-47e3-86ec-83f19111191a-util\") pod \"835551ba8f27f4fd61e1b05ebed5cb285496b645cbb6fd0ac403227c856dznq\" (UID: \"03fe4942-5f11-47e3-86ec-83f19111191a\") " pod="openstack-operators/835551ba8f27f4fd61e1b05ebed5cb285496b645cbb6fd0ac403227c856dznq" Dec 06 15:50:11 crc kubenswrapper[5003]: I1206 15:50:11.565866 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/03fe4942-5f11-47e3-86ec-83f19111191a-bundle\") pod \"835551ba8f27f4fd61e1b05ebed5cb285496b645cbb6fd0ac403227c856dznq\" (UID: \"03fe4942-5f11-47e3-86ec-83f19111191a\") " pod="openstack-operators/835551ba8f27f4fd61e1b05ebed5cb285496b645cbb6fd0ac403227c856dznq" Dec 06 15:50:11 crc kubenswrapper[5003]: I1206 15:50:11.589585 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgk6h\" (UniqueName: \"kubernetes.io/projected/03fe4942-5f11-47e3-86ec-83f19111191a-kube-api-access-mgk6h\") pod \"835551ba8f27f4fd61e1b05ebed5cb285496b645cbb6fd0ac403227c856dznq\" (UID: \"03fe4942-5f11-47e3-86ec-83f19111191a\") " pod="openstack-operators/835551ba8f27f4fd61e1b05ebed5cb285496b645cbb6fd0ac403227c856dznq" Dec 06 15:50:11 crc kubenswrapper[5003]: I1206 15:50:11.668940 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/835551ba8f27f4fd61e1b05ebed5cb285496b645cbb6fd0ac403227c856dznq" Dec 06 15:50:12 crc kubenswrapper[5003]: I1206 15:50:12.128224 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/835551ba8f27f4fd61e1b05ebed5cb285496b645cbb6fd0ac403227c856dznq"] Dec 06 15:50:12 crc kubenswrapper[5003]: W1206 15:50:12.138940 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod03fe4942_5f11_47e3_86ec_83f19111191a.slice/crio-50c9ab19087e54e40d465a5d2583e84eae734109bbc8d6bc497ce3f2778b134d WatchSource:0}: Error finding container 50c9ab19087e54e40d465a5d2583e84eae734109bbc8d6bc497ce3f2778b134d: Status 404 returned error can't find the container with id 50c9ab19087e54e40d465a5d2583e84eae734109bbc8d6bc497ce3f2778b134d Dec 06 15:50:12 crc kubenswrapper[5003]: I1206 15:50:12.590811 5003 generic.go:334] "Generic (PLEG): container finished" podID="03fe4942-5f11-47e3-86ec-83f19111191a" containerID="9c3cd840a8bfadb331b3c541574fded66678f4dc0928439e880d04889221b457" exitCode=0 Dec 06 15:50:12 crc kubenswrapper[5003]: I1206 15:50:12.590901 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/835551ba8f27f4fd61e1b05ebed5cb285496b645cbb6fd0ac403227c856dznq" event={"ID":"03fe4942-5f11-47e3-86ec-83f19111191a","Type":"ContainerDied","Data":"9c3cd840a8bfadb331b3c541574fded66678f4dc0928439e880d04889221b457"} Dec 06 15:50:12 crc kubenswrapper[5003]: I1206 15:50:12.591048 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/835551ba8f27f4fd61e1b05ebed5cb285496b645cbb6fd0ac403227c856dznq" event={"ID":"03fe4942-5f11-47e3-86ec-83f19111191a","Type":"ContainerStarted","Data":"50c9ab19087e54e40d465a5d2583e84eae734109bbc8d6bc497ce3f2778b134d"} Dec 06 15:50:13 crc kubenswrapper[5003]: I1206 15:50:13.598530 5003 generic.go:334] "Generic (PLEG): container finished" podID="03fe4942-5f11-47e3-86ec-83f19111191a" containerID="61043170fc7826b1fca90d7b07bc19e9b2e3dac557173f07c80068e8c398ab34" exitCode=0 Dec 06 15:50:13 crc kubenswrapper[5003]: I1206 15:50:13.598571 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/835551ba8f27f4fd61e1b05ebed5cb285496b645cbb6fd0ac403227c856dznq" event={"ID":"03fe4942-5f11-47e3-86ec-83f19111191a","Type":"ContainerDied","Data":"61043170fc7826b1fca90d7b07bc19e9b2e3dac557173f07c80068e8c398ab34"} Dec 06 15:50:14 crc kubenswrapper[5003]: I1206 15:50:14.609572 5003 generic.go:334] "Generic (PLEG): container finished" podID="03fe4942-5f11-47e3-86ec-83f19111191a" containerID="2be2e731a531caf7f63849f685eb6863a0f556de43b24c7db66f82e910442cff" exitCode=0 Dec 06 15:50:14 crc kubenswrapper[5003]: I1206 15:50:14.609674 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/835551ba8f27f4fd61e1b05ebed5cb285496b645cbb6fd0ac403227c856dznq" event={"ID":"03fe4942-5f11-47e3-86ec-83f19111191a","Type":"ContainerDied","Data":"2be2e731a531caf7f63849f685eb6863a0f556de43b24c7db66f82e910442cff"} Dec 06 15:50:15 crc kubenswrapper[5003]: I1206 15:50:15.925382 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/835551ba8f27f4fd61e1b05ebed5cb285496b645cbb6fd0ac403227c856dznq" Dec 06 15:50:16 crc kubenswrapper[5003]: I1206 15:50:16.030141 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mgk6h\" (UniqueName: \"kubernetes.io/projected/03fe4942-5f11-47e3-86ec-83f19111191a-kube-api-access-mgk6h\") pod \"03fe4942-5f11-47e3-86ec-83f19111191a\" (UID: \"03fe4942-5f11-47e3-86ec-83f19111191a\") " Dec 06 15:50:16 crc kubenswrapper[5003]: I1206 15:50:16.030602 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/03fe4942-5f11-47e3-86ec-83f19111191a-bundle\") pod \"03fe4942-5f11-47e3-86ec-83f19111191a\" (UID: \"03fe4942-5f11-47e3-86ec-83f19111191a\") " Dec 06 15:50:16 crc kubenswrapper[5003]: I1206 15:50:16.030862 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/03fe4942-5f11-47e3-86ec-83f19111191a-util\") pod \"03fe4942-5f11-47e3-86ec-83f19111191a\" (UID: \"03fe4942-5f11-47e3-86ec-83f19111191a\") " Dec 06 15:50:16 crc kubenswrapper[5003]: I1206 15:50:16.031856 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03fe4942-5f11-47e3-86ec-83f19111191a-bundle" (OuterVolumeSpecName: "bundle") pod "03fe4942-5f11-47e3-86ec-83f19111191a" (UID: "03fe4942-5f11-47e3-86ec-83f19111191a"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:50:16 crc kubenswrapper[5003]: I1206 15:50:16.039153 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03fe4942-5f11-47e3-86ec-83f19111191a-kube-api-access-mgk6h" (OuterVolumeSpecName: "kube-api-access-mgk6h") pod "03fe4942-5f11-47e3-86ec-83f19111191a" (UID: "03fe4942-5f11-47e3-86ec-83f19111191a"). InnerVolumeSpecName "kube-api-access-mgk6h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:50:16 crc kubenswrapper[5003]: I1206 15:50:16.061984 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03fe4942-5f11-47e3-86ec-83f19111191a-util" (OuterVolumeSpecName: "util") pod "03fe4942-5f11-47e3-86ec-83f19111191a" (UID: "03fe4942-5f11-47e3-86ec-83f19111191a"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:50:16 crc kubenswrapper[5003]: I1206 15:50:16.133238 5003 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/03fe4942-5f11-47e3-86ec-83f19111191a-util\") on node \"crc\" DevicePath \"\"" Dec 06 15:50:16 crc kubenswrapper[5003]: I1206 15:50:16.133283 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mgk6h\" (UniqueName: \"kubernetes.io/projected/03fe4942-5f11-47e3-86ec-83f19111191a-kube-api-access-mgk6h\") on node \"crc\" DevicePath \"\"" Dec 06 15:50:16 crc kubenswrapper[5003]: I1206 15:50:16.133296 5003 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/03fe4942-5f11-47e3-86ec-83f19111191a-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 15:50:16 crc kubenswrapper[5003]: I1206 15:50:16.627970 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/835551ba8f27f4fd61e1b05ebed5cb285496b645cbb6fd0ac403227c856dznq" event={"ID":"03fe4942-5f11-47e3-86ec-83f19111191a","Type":"ContainerDied","Data":"50c9ab19087e54e40d465a5d2583e84eae734109bbc8d6bc497ce3f2778b134d"} Dec 06 15:50:16 crc kubenswrapper[5003]: I1206 15:50:16.628008 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="50c9ab19087e54e40d465a5d2583e84eae734109bbc8d6bc497ce3f2778b134d" Dec 06 15:50:16 crc kubenswrapper[5003]: I1206 15:50:16.628169 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/835551ba8f27f4fd61e1b05ebed5cb285496b645cbb6fd0ac403227c856dznq" Dec 06 15:50:18 crc kubenswrapper[5003]: I1206 15:50:18.573077 5003 patch_prober.go:28] interesting pod/machine-config-daemon-w25db container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 15:50:18 crc kubenswrapper[5003]: I1206 15:50:18.573670 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 15:50:25 crc kubenswrapper[5003]: I1206 15:50:25.851606 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7fb9dff8cd-l4z2z"] Dec 06 15:50:25 crc kubenswrapper[5003]: E1206 15:50:25.852447 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03fe4942-5f11-47e3-86ec-83f19111191a" containerName="extract" Dec 06 15:50:25 crc kubenswrapper[5003]: I1206 15:50:25.852465 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="03fe4942-5f11-47e3-86ec-83f19111191a" containerName="extract" Dec 06 15:50:25 crc kubenswrapper[5003]: E1206 15:50:25.852502 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03fe4942-5f11-47e3-86ec-83f19111191a" containerName="util" Dec 06 15:50:25 crc kubenswrapper[5003]: I1206 15:50:25.852511 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="03fe4942-5f11-47e3-86ec-83f19111191a" containerName="util" Dec 06 15:50:25 crc kubenswrapper[5003]: E1206 15:50:25.852535 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03fe4942-5f11-47e3-86ec-83f19111191a" containerName="pull" Dec 06 
15:50:25 crc kubenswrapper[5003]: I1206 15:50:25.852546 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="03fe4942-5f11-47e3-86ec-83f19111191a" containerName="pull" Dec 06 15:50:25 crc kubenswrapper[5003]: I1206 15:50:25.852686 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="03fe4942-5f11-47e3-86ec-83f19111191a" containerName="extract" Dec 06 15:50:25 crc kubenswrapper[5003]: I1206 15:50:25.853238 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7fb9dff8cd-l4z2z" Dec 06 15:50:25 crc kubenswrapper[5003]: I1206 15:50:25.893912 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-service-cert" Dec 06 15:50:25 crc kubenswrapper[5003]: I1206 15:50:25.899919 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7fb9dff8cd-l4z2z"] Dec 06 15:50:25 crc kubenswrapper[5003]: I1206 15:50:25.900117 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-hwwdd" Dec 06 15:50:25 crc kubenswrapper[5003]: I1206 15:50:25.946460 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kbz5d\" (UniqueName: \"kubernetes.io/projected/eb973b37-d488-4739-9c25-96885cc3158b-kube-api-access-kbz5d\") pod \"keystone-operator-controller-manager-7fb9dff8cd-l4z2z\" (UID: \"eb973b37-d488-4739-9c25-96885cc3158b\") " pod="openstack-operators/keystone-operator-controller-manager-7fb9dff8cd-l4z2z" Dec 06 15:50:25 crc kubenswrapper[5003]: I1206 15:50:25.946590 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/eb973b37-d488-4739-9c25-96885cc3158b-webhook-cert\") pod \"keystone-operator-controller-manager-7fb9dff8cd-l4z2z\" (UID: \"eb973b37-d488-4739-9c25-96885cc3158b\") " pod="openstack-operators/keystone-operator-controller-manager-7fb9dff8cd-l4z2z" Dec 06 15:50:25 crc kubenswrapper[5003]: I1206 15:50:25.946699 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/eb973b37-d488-4739-9c25-96885cc3158b-apiservice-cert\") pod \"keystone-operator-controller-manager-7fb9dff8cd-l4z2z\" (UID: \"eb973b37-d488-4739-9c25-96885cc3158b\") " pod="openstack-operators/keystone-operator-controller-manager-7fb9dff8cd-l4z2z" Dec 06 15:50:26 crc kubenswrapper[5003]: I1206 15:50:26.048322 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kbz5d\" (UniqueName: \"kubernetes.io/projected/eb973b37-d488-4739-9c25-96885cc3158b-kube-api-access-kbz5d\") pod \"keystone-operator-controller-manager-7fb9dff8cd-l4z2z\" (UID: \"eb973b37-d488-4739-9c25-96885cc3158b\") " pod="openstack-operators/keystone-operator-controller-manager-7fb9dff8cd-l4z2z" Dec 06 15:50:26 crc kubenswrapper[5003]: I1206 15:50:26.048374 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/eb973b37-d488-4739-9c25-96885cc3158b-webhook-cert\") pod \"keystone-operator-controller-manager-7fb9dff8cd-l4z2z\" (UID: \"eb973b37-d488-4739-9c25-96885cc3158b\") " pod="openstack-operators/keystone-operator-controller-manager-7fb9dff8cd-l4z2z" Dec 06 15:50:26 crc kubenswrapper[5003]: 
I1206 15:50:26.048425 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/eb973b37-d488-4739-9c25-96885cc3158b-apiservice-cert\") pod \"keystone-operator-controller-manager-7fb9dff8cd-l4z2z\" (UID: \"eb973b37-d488-4739-9c25-96885cc3158b\") " pod="openstack-operators/keystone-operator-controller-manager-7fb9dff8cd-l4z2z" Dec 06 15:50:26 crc kubenswrapper[5003]: I1206 15:50:26.054433 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/eb973b37-d488-4739-9c25-96885cc3158b-webhook-cert\") pod \"keystone-operator-controller-manager-7fb9dff8cd-l4z2z\" (UID: \"eb973b37-d488-4739-9c25-96885cc3158b\") " pod="openstack-operators/keystone-operator-controller-manager-7fb9dff8cd-l4z2z" Dec 06 15:50:26 crc kubenswrapper[5003]: I1206 15:50:26.059154 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/eb973b37-d488-4739-9c25-96885cc3158b-apiservice-cert\") pod \"keystone-operator-controller-manager-7fb9dff8cd-l4z2z\" (UID: \"eb973b37-d488-4739-9c25-96885cc3158b\") " pod="openstack-operators/keystone-operator-controller-manager-7fb9dff8cd-l4z2z" Dec 06 15:50:26 crc kubenswrapper[5003]: I1206 15:50:26.071228 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kbz5d\" (UniqueName: \"kubernetes.io/projected/eb973b37-d488-4739-9c25-96885cc3158b-kube-api-access-kbz5d\") pod \"keystone-operator-controller-manager-7fb9dff8cd-l4z2z\" (UID: \"eb973b37-d488-4739-9c25-96885cc3158b\") " pod="openstack-operators/keystone-operator-controller-manager-7fb9dff8cd-l4z2z" Dec 06 15:50:26 crc kubenswrapper[5003]: I1206 15:50:26.209797 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7fb9dff8cd-l4z2z" Dec 06 15:50:26 crc kubenswrapper[5003]: I1206 15:50:26.432773 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7fb9dff8cd-l4z2z"] Dec 06 15:50:26 crc kubenswrapper[5003]: I1206 15:50:26.691284 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7fb9dff8cd-l4z2z" event={"ID":"eb973b37-d488-4739-9c25-96885cc3158b","Type":"ContainerStarted","Data":"57989e3f5fd9397d579d20768ffd92e032a0245838e03851cb9fdd6f2406107f"} Dec 06 15:50:29 crc kubenswrapper[5003]: I1206 15:50:29.723781 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7fb9dff8cd-l4z2z" event={"ID":"eb973b37-d488-4739-9c25-96885cc3158b","Type":"ContainerStarted","Data":"d4e5fe15f47d713486a4ea61a9d4517d28fc3ca1358eaf680cc145c4fa329d32"} Dec 06 15:50:29 crc kubenswrapper[5003]: I1206 15:50:29.724173 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7fb9dff8cd-l4z2z" Dec 06 15:50:29 crc kubenswrapper[5003]: I1206 15:50:29.737915 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7fb9dff8cd-l4z2z" podStartSLOduration=1.649924622 podStartE2EDuration="4.737887707s" podCreationTimestamp="2025-12-06 15:50:25 +0000 UTC" firstStartedPulling="2025-12-06 15:50:26.440340571 +0000 UTC m=+1104.973694952" lastFinishedPulling="2025-12-06 15:50:29.528303656 +0000 UTC m=+1108.061658037" observedRunningTime="2025-12-06 15:50:29.730722439 +0000 UTC m=+1108.264076840" watchObservedRunningTime="2025-12-06 15:50:29.737887707 +0000 UTC m=+1108.271242108" Dec 06 15:50:36 crc kubenswrapper[5003]: I1206 15:50:36.215704 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7fb9dff8cd-l4z2z" Dec 06 15:50:36 crc kubenswrapper[5003]: I1206 15:50:36.755168 5003 generic.go:334] "Generic (PLEG): container finished" podID="27124a81-a0ad-4bc6-ad89-d2f5738570bc" containerID="6e16f162181f4e0cf41ba4628b056e20a6058930d1b6b01cd8815905709818b5" exitCode=0 Dec 06 15:50:36 crc kubenswrapper[5003]: I1206 15:50:36.755213 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/rabbitmq-server-0" event={"ID":"27124a81-a0ad-4bc6-ad89-d2f5738570bc","Type":"ContainerDied","Data":"6e16f162181f4e0cf41ba4628b056e20a6058930d1b6b01cd8815905709818b5"} Dec 06 15:50:37 crc kubenswrapper[5003]: I1206 15:50:37.764850 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/rabbitmq-server-0" event={"ID":"27124a81-a0ad-4bc6-ad89-d2f5738570bc","Type":"ContainerStarted","Data":"5b0f91f5eeff936fbe0fe160ee20c396ff099899ce29f5cdd4fa1f12d6ea3004"} Dec 06 15:50:37 crc kubenswrapper[5003]: I1206 15:50:37.765142 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="horizon-kuttl-tests/rabbitmq-server-0" Dec 06 15:50:37 crc kubenswrapper[5003]: I1206 15:50:37.835150 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="horizon-kuttl-tests/rabbitmq-server-0" podStartSLOduration=37.620351834 podStartE2EDuration="44.835123747s" podCreationTimestamp="2025-12-06 15:49:53 +0000 UTC" firstStartedPulling="2025-12-06 15:49:55.425927425 +0000 UTC m=+1073.959281806" 
lastFinishedPulling="2025-12-06 15:50:02.640699328 +0000 UTC m=+1081.174053719" observedRunningTime="2025-12-06 15:50:37.810263353 +0000 UTC m=+1116.343617744" watchObservedRunningTime="2025-12-06 15:50:37.835123747 +0000 UTC m=+1116.368478128" Dec 06 15:50:38 crc kubenswrapper[5003]: I1206 15:50:38.634193 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/keystone-fdf1-account-create-update-px2s8"] Dec 06 15:50:38 crc kubenswrapper[5003]: I1206 15:50:38.635500 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/keystone-fdf1-account-create-update-px2s8" Dec 06 15:50:38 crc kubenswrapper[5003]: I1206 15:50:38.639773 5003 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone-db-secret" Dec 06 15:50:38 crc kubenswrapper[5003]: I1206 15:50:38.645517 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/keystone-fdf1-account-create-update-px2s8"] Dec 06 15:50:38 crc kubenswrapper[5003]: I1206 15:50:38.725281 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/keystone-db-create-nkp64"] Dec 06 15:50:38 crc kubenswrapper[5003]: I1206 15:50:38.726211 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/keystone-db-create-nkp64" Dec 06 15:50:38 crc kubenswrapper[5003]: I1206 15:50:38.732807 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/keystone-db-create-nkp64"] Dec 06 15:50:38 crc kubenswrapper[5003]: I1206 15:50:38.779601 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab07907f-273d-4c4a-844d-74244e74ffe4-operator-scripts\") pod \"keystone-fdf1-account-create-update-px2s8\" (UID: \"ab07907f-273d-4c4a-844d-74244e74ffe4\") " pod="horizon-kuttl-tests/keystone-fdf1-account-create-update-px2s8" Dec 06 15:50:38 crc kubenswrapper[5003]: I1206 15:50:38.779969 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1c1789cc-9c03-4473-a097-337f66aa38e9-operator-scripts\") pod \"keystone-db-create-nkp64\" (UID: \"1c1789cc-9c03-4473-a097-337f66aa38e9\") " pod="horizon-kuttl-tests/keystone-db-create-nkp64" Dec 06 15:50:38 crc kubenswrapper[5003]: I1206 15:50:38.780030 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qd4zk\" (UniqueName: \"kubernetes.io/projected/1c1789cc-9c03-4473-a097-337f66aa38e9-kube-api-access-qd4zk\") pod \"keystone-db-create-nkp64\" (UID: \"1c1789cc-9c03-4473-a097-337f66aa38e9\") " pod="horizon-kuttl-tests/keystone-db-create-nkp64" Dec 06 15:50:38 crc kubenswrapper[5003]: I1206 15:50:38.780062 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rjsb\" (UniqueName: \"kubernetes.io/projected/ab07907f-273d-4c4a-844d-74244e74ffe4-kube-api-access-4rjsb\") pod \"keystone-fdf1-account-create-update-px2s8\" (UID: \"ab07907f-273d-4c4a-844d-74244e74ffe4\") " pod="horizon-kuttl-tests/keystone-fdf1-account-create-update-px2s8" Dec 06 15:50:38 crc kubenswrapper[5003]: I1206 15:50:38.880960 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1c1789cc-9c03-4473-a097-337f66aa38e9-operator-scripts\") pod 
\"keystone-db-create-nkp64\" (UID: \"1c1789cc-9c03-4473-a097-337f66aa38e9\") " pod="horizon-kuttl-tests/keystone-db-create-nkp64" Dec 06 15:50:38 crc kubenswrapper[5003]: I1206 15:50:38.881107 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qd4zk\" (UniqueName: \"kubernetes.io/projected/1c1789cc-9c03-4473-a097-337f66aa38e9-kube-api-access-qd4zk\") pod \"keystone-db-create-nkp64\" (UID: \"1c1789cc-9c03-4473-a097-337f66aa38e9\") " pod="horizon-kuttl-tests/keystone-db-create-nkp64" Dec 06 15:50:38 crc kubenswrapper[5003]: I1206 15:50:38.881142 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rjsb\" (UniqueName: \"kubernetes.io/projected/ab07907f-273d-4c4a-844d-74244e74ffe4-kube-api-access-4rjsb\") pod \"keystone-fdf1-account-create-update-px2s8\" (UID: \"ab07907f-273d-4c4a-844d-74244e74ffe4\") " pod="horizon-kuttl-tests/keystone-fdf1-account-create-update-px2s8" Dec 06 15:50:38 crc kubenswrapper[5003]: I1206 15:50:38.881181 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab07907f-273d-4c4a-844d-74244e74ffe4-operator-scripts\") pod \"keystone-fdf1-account-create-update-px2s8\" (UID: \"ab07907f-273d-4c4a-844d-74244e74ffe4\") " pod="horizon-kuttl-tests/keystone-fdf1-account-create-update-px2s8" Dec 06 15:50:38 crc kubenswrapper[5003]: I1206 15:50:38.881772 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1c1789cc-9c03-4473-a097-337f66aa38e9-operator-scripts\") pod \"keystone-db-create-nkp64\" (UID: \"1c1789cc-9c03-4473-a097-337f66aa38e9\") " pod="horizon-kuttl-tests/keystone-db-create-nkp64" Dec 06 15:50:38 crc kubenswrapper[5003]: I1206 15:50:38.881925 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab07907f-273d-4c4a-844d-74244e74ffe4-operator-scripts\") pod \"keystone-fdf1-account-create-update-px2s8\" (UID: \"ab07907f-273d-4c4a-844d-74244e74ffe4\") " pod="horizon-kuttl-tests/keystone-fdf1-account-create-update-px2s8" Dec 06 15:50:38 crc kubenswrapper[5003]: I1206 15:50:38.906650 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rjsb\" (UniqueName: \"kubernetes.io/projected/ab07907f-273d-4c4a-844d-74244e74ffe4-kube-api-access-4rjsb\") pod \"keystone-fdf1-account-create-update-px2s8\" (UID: \"ab07907f-273d-4c4a-844d-74244e74ffe4\") " pod="horizon-kuttl-tests/keystone-fdf1-account-create-update-px2s8" Dec 06 15:50:38 crc kubenswrapper[5003]: I1206 15:50:38.915830 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qd4zk\" (UniqueName: \"kubernetes.io/projected/1c1789cc-9c03-4473-a097-337f66aa38e9-kube-api-access-qd4zk\") pod \"keystone-db-create-nkp64\" (UID: \"1c1789cc-9c03-4473-a097-337f66aa38e9\") " pod="horizon-kuttl-tests/keystone-db-create-nkp64" Dec 06 15:50:38 crc kubenswrapper[5003]: I1206 15:50:38.951432 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/keystone-fdf1-account-create-update-px2s8" Dec 06 15:50:39 crc kubenswrapper[5003]: I1206 15:50:39.040551 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/keystone-db-create-nkp64" Dec 06 15:50:39 crc kubenswrapper[5003]: I1206 15:50:39.438872 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/keystone-fdf1-account-create-update-px2s8"] Dec 06 15:50:39 crc kubenswrapper[5003]: W1206 15:50:39.439387 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podab07907f_273d_4c4a_844d_74244e74ffe4.slice/crio-4df3133e5742565bd3cea40b707e36af7a066313ae99dff12ae559e8d9ecf623 WatchSource:0}: Error finding container 4df3133e5742565bd3cea40b707e36af7a066313ae99dff12ae559e8d9ecf623: Status 404 returned error can't find the container with id 4df3133e5742565bd3cea40b707e36af7a066313ae99dff12ae559e8d9ecf623 Dec 06 15:50:39 crc kubenswrapper[5003]: I1206 15:50:39.543548 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/keystone-db-create-nkp64"] Dec 06 15:50:39 crc kubenswrapper[5003]: W1206 15:50:39.544941 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1c1789cc_9c03_4473_a097_337f66aa38e9.slice/crio-dc36734a39228efb99e27c90ee6e8299ab7587f671e8c0462f029d88462bee23 WatchSource:0}: Error finding container dc36734a39228efb99e27c90ee6e8299ab7587f671e8c0462f029d88462bee23: Status 404 returned error can't find the container with id dc36734a39228efb99e27c90ee6e8299ab7587f671e8c0462f029d88462bee23 Dec 06 15:50:39 crc kubenswrapper[5003]: I1206 15:50:39.779133 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-fdf1-account-create-update-px2s8" event={"ID":"ab07907f-273d-4c4a-844d-74244e74ffe4","Type":"ContainerStarted","Data":"d82d5cc08343004d706177acded13a64e739dc80cb19dc8256a8b7daf1f276c2"} Dec 06 15:50:39 crc kubenswrapper[5003]: I1206 15:50:39.779209 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-fdf1-account-create-update-px2s8" event={"ID":"ab07907f-273d-4c4a-844d-74244e74ffe4","Type":"ContainerStarted","Data":"4df3133e5742565bd3cea40b707e36af7a066313ae99dff12ae559e8d9ecf623"} Dec 06 15:50:39 crc kubenswrapper[5003]: I1206 15:50:39.780927 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-db-create-nkp64" event={"ID":"1c1789cc-9c03-4473-a097-337f66aa38e9","Type":"ContainerStarted","Data":"5bbecf4f38326399ef44b8146d9bda2d3582d30047f56f8aa0c02a184aac3010"} Dec 06 15:50:39 crc kubenswrapper[5003]: I1206 15:50:39.780981 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-db-create-nkp64" event={"ID":"1c1789cc-9c03-4473-a097-337f66aa38e9","Type":"ContainerStarted","Data":"dc36734a39228efb99e27c90ee6e8299ab7587f671e8c0462f029d88462bee23"} Dec 06 15:50:39 crc kubenswrapper[5003]: I1206 15:50:39.799114 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="horizon-kuttl-tests/keystone-fdf1-account-create-update-px2s8" podStartSLOduration=1.799085663 podStartE2EDuration="1.799085663s" podCreationTimestamp="2025-12-06 15:50:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:50:39.792320888 +0000 UTC m=+1118.325675279" watchObservedRunningTime="2025-12-06 15:50:39.799085663 +0000 UTC m=+1118.332440044" Dec 06 15:50:39 crc kubenswrapper[5003]: I1206 15:50:39.812944 5003 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="horizon-kuttl-tests/keystone-db-create-nkp64" podStartSLOduration=1.8129203839999999 podStartE2EDuration="1.812920384s" podCreationTimestamp="2025-12-06 15:50:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:50:39.806738854 +0000 UTC m=+1118.340093245" watchObservedRunningTime="2025-12-06 15:50:39.812920384 +0000 UTC m=+1118.346274755" Dec 06 15:50:40 crc kubenswrapper[5003]: I1206 15:50:40.789182 5003 generic.go:334] "Generic (PLEG): container finished" podID="1c1789cc-9c03-4473-a097-337f66aa38e9" containerID="5bbecf4f38326399ef44b8146d9bda2d3582d30047f56f8aa0c02a184aac3010" exitCode=0 Dec 06 15:50:40 crc kubenswrapper[5003]: I1206 15:50:40.789249 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-db-create-nkp64" event={"ID":"1c1789cc-9c03-4473-a097-337f66aa38e9","Type":"ContainerDied","Data":"5bbecf4f38326399ef44b8146d9bda2d3582d30047f56f8aa0c02a184aac3010"} Dec 06 15:50:40 crc kubenswrapper[5003]: I1206 15:50:40.791100 5003 generic.go:334] "Generic (PLEG): container finished" podID="ab07907f-273d-4c4a-844d-74244e74ffe4" containerID="d82d5cc08343004d706177acded13a64e739dc80cb19dc8256a8b7daf1f276c2" exitCode=0 Dec 06 15:50:40 crc kubenswrapper[5003]: I1206 15:50:40.791130 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-fdf1-account-create-update-px2s8" event={"ID":"ab07907f-273d-4c4a-844d-74244e74ffe4","Type":"ContainerDied","Data":"d82d5cc08343004d706177acded13a64e739dc80cb19dc8256a8b7daf1f276c2"} Dec 06 15:50:41 crc kubenswrapper[5003]: I1206 15:50:41.466233 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-index-9x6k2"] Dec 06 15:50:41 crc kubenswrapper[5003]: I1206 15:50:41.467220 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-index-9x6k2" Dec 06 15:50:41 crc kubenswrapper[5003]: I1206 15:50:41.472617 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-index-dockercfg-mznjq" Dec 06 15:50:41 crc kubenswrapper[5003]: I1206 15:50:41.505640 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-index-9x6k2"] Dec 06 15:50:41 crc kubenswrapper[5003]: I1206 15:50:41.619744 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrfsq\" (UniqueName: \"kubernetes.io/projected/14a9e8ed-58bb-4f3d-969a-76a2328ae4b0-kube-api-access-wrfsq\") pod \"horizon-operator-index-9x6k2\" (UID: \"14a9e8ed-58bb-4f3d-969a-76a2328ae4b0\") " pod="openstack-operators/horizon-operator-index-9x6k2" Dec 06 15:50:41 crc kubenswrapper[5003]: I1206 15:50:41.720925 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrfsq\" (UniqueName: \"kubernetes.io/projected/14a9e8ed-58bb-4f3d-969a-76a2328ae4b0-kube-api-access-wrfsq\") pod \"horizon-operator-index-9x6k2\" (UID: \"14a9e8ed-58bb-4f3d-969a-76a2328ae4b0\") " pod="openstack-operators/horizon-operator-index-9x6k2" Dec 06 15:50:41 crc kubenswrapper[5003]: I1206 15:50:41.746648 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wrfsq\" (UniqueName: \"kubernetes.io/projected/14a9e8ed-58bb-4f3d-969a-76a2328ae4b0-kube-api-access-wrfsq\") pod \"horizon-operator-index-9x6k2\" (UID: \"14a9e8ed-58bb-4f3d-969a-76a2328ae4b0\") " pod="openstack-operators/horizon-operator-index-9x6k2" Dec 06 15:50:41 crc kubenswrapper[5003]: I1206 15:50:41.789579 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-index-9x6k2" Dec 06 15:50:42 crc kubenswrapper[5003]: I1206 15:50:42.163467 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/keystone-fdf1-account-create-update-px2s8" Dec 06 15:50:42 crc kubenswrapper[5003]: I1206 15:50:42.168263 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/keystone-db-create-nkp64" Dec 06 15:50:42 crc kubenswrapper[5003]: I1206 15:50:42.229248 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1c1789cc-9c03-4473-a097-337f66aa38e9-operator-scripts\") pod \"1c1789cc-9c03-4473-a097-337f66aa38e9\" (UID: \"1c1789cc-9c03-4473-a097-337f66aa38e9\") " Dec 06 15:50:42 crc kubenswrapper[5003]: I1206 15:50:42.229348 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab07907f-273d-4c4a-844d-74244e74ffe4-operator-scripts\") pod \"ab07907f-273d-4c4a-844d-74244e74ffe4\" (UID: \"ab07907f-273d-4c4a-844d-74244e74ffe4\") " Dec 06 15:50:42 crc kubenswrapper[5003]: I1206 15:50:42.229431 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4rjsb\" (UniqueName: \"kubernetes.io/projected/ab07907f-273d-4c4a-844d-74244e74ffe4-kube-api-access-4rjsb\") pod \"ab07907f-273d-4c4a-844d-74244e74ffe4\" (UID: \"ab07907f-273d-4c4a-844d-74244e74ffe4\") " Dec 06 15:50:42 crc kubenswrapper[5003]: I1206 15:50:42.229463 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qd4zk\" (UniqueName: \"kubernetes.io/projected/1c1789cc-9c03-4473-a097-337f66aa38e9-kube-api-access-qd4zk\") pod \"1c1789cc-9c03-4473-a097-337f66aa38e9\" (UID: \"1c1789cc-9c03-4473-a097-337f66aa38e9\") " Dec 06 15:50:42 crc kubenswrapper[5003]: I1206 15:50:42.230344 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab07907f-273d-4c4a-844d-74244e74ffe4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ab07907f-273d-4c4a-844d-74244e74ffe4" (UID: "ab07907f-273d-4c4a-844d-74244e74ffe4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:50:42 crc kubenswrapper[5003]: I1206 15:50:42.230565 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1c1789cc-9c03-4473-a097-337f66aa38e9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1c1789cc-9c03-4473-a097-337f66aa38e9" (UID: "1c1789cc-9c03-4473-a097-337f66aa38e9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:50:42 crc kubenswrapper[5003]: I1206 15:50:42.234623 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab07907f-273d-4c4a-844d-74244e74ffe4-kube-api-access-4rjsb" (OuterVolumeSpecName: "kube-api-access-4rjsb") pod "ab07907f-273d-4c4a-844d-74244e74ffe4" (UID: "ab07907f-273d-4c4a-844d-74244e74ffe4"). InnerVolumeSpecName "kube-api-access-4rjsb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:50:42 crc kubenswrapper[5003]: I1206 15:50:42.234679 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c1789cc-9c03-4473-a097-337f66aa38e9-kube-api-access-qd4zk" (OuterVolumeSpecName: "kube-api-access-qd4zk") pod "1c1789cc-9c03-4473-a097-337f66aa38e9" (UID: "1c1789cc-9c03-4473-a097-337f66aa38e9"). InnerVolumeSpecName "kube-api-access-qd4zk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:50:42 crc kubenswrapper[5003]: I1206 15:50:42.317402 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-index-9x6k2"] Dec 06 15:50:42 crc kubenswrapper[5003]: W1206 15:50:42.326326 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod14a9e8ed_58bb_4f3d_969a_76a2328ae4b0.slice/crio-bb30ef493563cbc66e5884da5f2c390efa1e852d9e298f1838b82b4860d9e01f WatchSource:0}: Error finding container bb30ef493563cbc66e5884da5f2c390efa1e852d9e298f1838b82b4860d9e01f: Status 404 returned error can't find the container with id bb30ef493563cbc66e5884da5f2c390efa1e852d9e298f1838b82b4860d9e01f Dec 06 15:50:42 crc kubenswrapper[5003]: I1206 15:50:42.331130 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4rjsb\" (UniqueName: \"kubernetes.io/projected/ab07907f-273d-4c4a-844d-74244e74ffe4-kube-api-access-4rjsb\") on node \"crc\" DevicePath \"\"" Dec 06 15:50:42 crc kubenswrapper[5003]: I1206 15:50:42.331160 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qd4zk\" (UniqueName: \"kubernetes.io/projected/1c1789cc-9c03-4473-a097-337f66aa38e9-kube-api-access-qd4zk\") on node \"crc\" DevicePath \"\"" Dec 06 15:50:42 crc kubenswrapper[5003]: I1206 15:50:42.331172 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1c1789cc-9c03-4473-a097-337f66aa38e9-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 15:50:42 crc kubenswrapper[5003]: I1206 15:50:42.331182 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab07907f-273d-4c4a-844d-74244e74ffe4-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 15:50:42 crc kubenswrapper[5003]: I1206 15:50:42.836028 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-db-create-nkp64" event={"ID":"1c1789cc-9c03-4473-a097-337f66aa38e9","Type":"ContainerDied","Data":"dc36734a39228efb99e27c90ee6e8299ab7587f671e8c0462f029d88462bee23"} Dec 06 15:50:42 crc kubenswrapper[5003]: I1206 15:50:42.836373 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dc36734a39228efb99e27c90ee6e8299ab7587f671e8c0462f029d88462bee23" Dec 06 15:50:42 crc kubenswrapper[5003]: I1206 15:50:42.836461 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/keystone-db-create-nkp64" Dec 06 15:50:42 crc kubenswrapper[5003]: I1206 15:50:42.846715 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-index-9x6k2" event={"ID":"14a9e8ed-58bb-4f3d-969a-76a2328ae4b0","Type":"ContainerStarted","Data":"bb30ef493563cbc66e5884da5f2c390efa1e852d9e298f1838b82b4860d9e01f"} Dec 06 15:50:42 crc kubenswrapper[5003]: I1206 15:50:42.850101 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-fdf1-account-create-update-px2s8" event={"ID":"ab07907f-273d-4c4a-844d-74244e74ffe4","Type":"ContainerDied","Data":"4df3133e5742565bd3cea40b707e36af7a066313ae99dff12ae559e8d9ecf623"} Dec 06 15:50:42 crc kubenswrapper[5003]: I1206 15:50:42.850139 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4df3133e5742565bd3cea40b707e36af7a066313ae99dff12ae559e8d9ecf623" Dec 06 15:50:42 crc kubenswrapper[5003]: I1206 15:50:42.850180 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/keystone-fdf1-account-create-update-px2s8" Dec 06 15:50:44 crc kubenswrapper[5003]: I1206 15:50:44.865167 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-index-9x6k2" event={"ID":"14a9e8ed-58bb-4f3d-969a-76a2328ae4b0","Type":"ContainerStarted","Data":"06f350fff68dc9ccbca417d264fecb7ee3e3415ad3012e808d8c117e7edcc742"} Dec 06 15:50:44 crc kubenswrapper[5003]: I1206 15:50:44.881377 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-index-9x6k2" podStartSLOduration=2.050518449 podStartE2EDuration="3.88135739s" podCreationTimestamp="2025-12-06 15:50:41 +0000 UTC" firstStartedPulling="2025-12-06 15:50:42.329174297 +0000 UTC m=+1120.862528678" lastFinishedPulling="2025-12-06 15:50:44.160013228 +0000 UTC m=+1122.693367619" observedRunningTime="2025-12-06 15:50:44.878373627 +0000 UTC m=+1123.411728028" watchObservedRunningTime="2025-12-06 15:50:44.88135739 +0000 UTC m=+1123.414711771" Dec 06 15:50:45 crc kubenswrapper[5003]: I1206 15:50:45.660799 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/horizon-operator-index-9x6k2"] Dec 06 15:50:46 crc kubenswrapper[5003]: I1206 15:50:46.472157 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-index-mw2lw"] Dec 06 15:50:46 crc kubenswrapper[5003]: E1206 15:50:46.472809 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c1789cc-9c03-4473-a097-337f66aa38e9" containerName="mariadb-database-create" Dec 06 15:50:46 crc kubenswrapper[5003]: I1206 15:50:46.472829 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c1789cc-9c03-4473-a097-337f66aa38e9" containerName="mariadb-database-create" Dec 06 15:50:46 crc kubenswrapper[5003]: E1206 15:50:46.472850 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab07907f-273d-4c4a-844d-74244e74ffe4" containerName="mariadb-account-create-update" Dec 06 15:50:46 crc kubenswrapper[5003]: I1206 15:50:46.472861 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab07907f-273d-4c4a-844d-74244e74ffe4" containerName="mariadb-account-create-update" Dec 06 15:50:46 crc kubenswrapper[5003]: I1206 15:50:46.473049 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c1789cc-9c03-4473-a097-337f66aa38e9" containerName="mariadb-database-create" Dec 06 15:50:46 crc 
kubenswrapper[5003]: I1206 15:50:46.473076 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab07907f-273d-4c4a-844d-74244e74ffe4" containerName="mariadb-account-create-update" Dec 06 15:50:46 crc kubenswrapper[5003]: I1206 15:50:46.473775 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-index-mw2lw" Dec 06 15:50:46 crc kubenswrapper[5003]: I1206 15:50:46.483167 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-index-mw2lw"] Dec 06 15:50:46 crc kubenswrapper[5003]: I1206 15:50:46.605129 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pw6qg\" (UniqueName: \"kubernetes.io/projected/611b7f1b-1296-4dee-a189-7e38e1e1f0b9-kube-api-access-pw6qg\") pod \"horizon-operator-index-mw2lw\" (UID: \"611b7f1b-1296-4dee-a189-7e38e1e1f0b9\") " pod="openstack-operators/horizon-operator-index-mw2lw" Dec 06 15:50:46 crc kubenswrapper[5003]: I1206 15:50:46.707098 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pw6qg\" (UniqueName: \"kubernetes.io/projected/611b7f1b-1296-4dee-a189-7e38e1e1f0b9-kube-api-access-pw6qg\") pod \"horizon-operator-index-mw2lw\" (UID: \"611b7f1b-1296-4dee-a189-7e38e1e1f0b9\") " pod="openstack-operators/horizon-operator-index-mw2lw" Dec 06 15:50:46 crc kubenswrapper[5003]: I1206 15:50:46.724073 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pw6qg\" (UniqueName: \"kubernetes.io/projected/611b7f1b-1296-4dee-a189-7e38e1e1f0b9-kube-api-access-pw6qg\") pod \"horizon-operator-index-mw2lw\" (UID: \"611b7f1b-1296-4dee-a189-7e38e1e1f0b9\") " pod="openstack-operators/horizon-operator-index-mw2lw" Dec 06 15:50:46 crc kubenswrapper[5003]: I1206 15:50:46.806052 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-index-mw2lw" Dec 06 15:50:46 crc kubenswrapper[5003]: I1206 15:50:46.879598 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/horizon-operator-index-9x6k2" podUID="14a9e8ed-58bb-4f3d-969a-76a2328ae4b0" containerName="registry-server" containerID="cri-o://06f350fff68dc9ccbca417d264fecb7ee3e3415ad3012e808d8c117e7edcc742" gracePeriod=2 Dec 06 15:50:47 crc kubenswrapper[5003]: I1206 15:50:47.229861 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-index-mw2lw"] Dec 06 15:50:47 crc kubenswrapper[5003]: I1206 15:50:47.254991 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-index-9x6k2" Dec 06 15:50:47 crc kubenswrapper[5003]: I1206 15:50:47.316372 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wrfsq\" (UniqueName: \"kubernetes.io/projected/14a9e8ed-58bb-4f3d-969a-76a2328ae4b0-kube-api-access-wrfsq\") pod \"14a9e8ed-58bb-4f3d-969a-76a2328ae4b0\" (UID: \"14a9e8ed-58bb-4f3d-969a-76a2328ae4b0\") " Dec 06 15:50:47 crc kubenswrapper[5003]: I1206 15:50:47.321197 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14a9e8ed-58bb-4f3d-969a-76a2328ae4b0-kube-api-access-wrfsq" (OuterVolumeSpecName: "kube-api-access-wrfsq") pod "14a9e8ed-58bb-4f3d-969a-76a2328ae4b0" (UID: "14a9e8ed-58bb-4f3d-969a-76a2328ae4b0"). InnerVolumeSpecName "kube-api-access-wrfsq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:50:47 crc kubenswrapper[5003]: I1206 15:50:47.418375 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wrfsq\" (UniqueName: \"kubernetes.io/projected/14a9e8ed-58bb-4f3d-969a-76a2328ae4b0-kube-api-access-wrfsq\") on node \"crc\" DevicePath \"\"" Dec 06 15:50:47 crc kubenswrapper[5003]: I1206 15:50:47.890048 5003 generic.go:334] "Generic (PLEG): container finished" podID="14a9e8ed-58bb-4f3d-969a-76a2328ae4b0" containerID="06f350fff68dc9ccbca417d264fecb7ee3e3415ad3012e808d8c117e7edcc742" exitCode=0 Dec 06 15:50:47 crc kubenswrapper[5003]: I1206 15:50:47.890119 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-index-9x6k2" event={"ID":"14a9e8ed-58bb-4f3d-969a-76a2328ae4b0","Type":"ContainerDied","Data":"06f350fff68dc9ccbca417d264fecb7ee3e3415ad3012e808d8c117e7edcc742"} Dec 06 15:50:47 crc kubenswrapper[5003]: I1206 15:50:47.890147 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-index-9x6k2" Dec 06 15:50:47 crc kubenswrapper[5003]: I1206 15:50:47.890178 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-index-9x6k2" event={"ID":"14a9e8ed-58bb-4f3d-969a-76a2328ae4b0","Type":"ContainerDied","Data":"bb30ef493563cbc66e5884da5f2c390efa1e852d9e298f1838b82b4860d9e01f"} Dec 06 15:50:47 crc kubenswrapper[5003]: I1206 15:50:47.890209 5003 scope.go:117] "RemoveContainer" containerID="06f350fff68dc9ccbca417d264fecb7ee3e3415ad3012e808d8c117e7edcc742" Dec 06 15:50:47 crc kubenswrapper[5003]: I1206 15:50:47.894837 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-index-mw2lw" event={"ID":"611b7f1b-1296-4dee-a189-7e38e1e1f0b9","Type":"ContainerStarted","Data":"ef5dd15fdf7318dcdd3307bc1ea72789ea6bddf6fa0765f2ce2c41cfd748a155"} Dec 06 15:50:47 crc kubenswrapper[5003]: I1206 15:50:47.894895 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-index-mw2lw" event={"ID":"611b7f1b-1296-4dee-a189-7e38e1e1f0b9","Type":"ContainerStarted","Data":"d93ffe1d145f0d2f18a16d75429ec17df29e121eabc7a74c5137b7aa55534bee"} Dec 06 15:50:47 crc kubenswrapper[5003]: I1206 15:50:47.911690 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/horizon-operator-index-9x6k2"] Dec 06 15:50:47 crc kubenswrapper[5003]: I1206 15:50:47.914584 5003 scope.go:117] "RemoveContainer" containerID="06f350fff68dc9ccbca417d264fecb7ee3e3415ad3012e808d8c117e7edcc742" Dec 06 15:50:47 crc kubenswrapper[5003]: E1206 15:50:47.918195 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"06f350fff68dc9ccbca417d264fecb7ee3e3415ad3012e808d8c117e7edcc742\": container with ID starting with 06f350fff68dc9ccbca417d264fecb7ee3e3415ad3012e808d8c117e7edcc742 not found: ID does not exist" containerID="06f350fff68dc9ccbca417d264fecb7ee3e3415ad3012e808d8c117e7edcc742" Dec 06 15:50:47 crc kubenswrapper[5003]: I1206 15:50:47.918239 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"06f350fff68dc9ccbca417d264fecb7ee3e3415ad3012e808d8c117e7edcc742"} err="failed to get container status \"06f350fff68dc9ccbca417d264fecb7ee3e3415ad3012e808d8c117e7edcc742\": rpc error: code = NotFound desc = could not find container \"06f350fff68dc9ccbca417d264fecb7ee3e3415ad3012e808d8c117e7edcc742\": 
container with ID starting with 06f350fff68dc9ccbca417d264fecb7ee3e3415ad3012e808d8c117e7edcc742 not found: ID does not exist" Dec 06 15:50:47 crc kubenswrapper[5003]: I1206 15:50:47.921402 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/horizon-operator-index-9x6k2"] Dec 06 15:50:47 crc kubenswrapper[5003]: I1206 15:50:47.932022 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-index-mw2lw" podStartSLOduration=1.8689733309999998 podStartE2EDuration="1.931999737s" podCreationTimestamp="2025-12-06 15:50:46 +0000 UTC" firstStartedPulling="2025-12-06 15:50:47.231322904 +0000 UTC m=+1125.764677285" lastFinishedPulling="2025-12-06 15:50:47.29434929 +0000 UTC m=+1125.827703691" observedRunningTime="2025-12-06 15:50:47.923356579 +0000 UTC m=+1126.456710980" watchObservedRunningTime="2025-12-06 15:50:47.931999737 +0000 UTC m=+1126.465354118" Dec 06 15:50:48 crc kubenswrapper[5003]: I1206 15:50:48.957520 5003 patch_prober.go:28] interesting pod/machine-config-daemon-w25db container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 15:50:48 crc kubenswrapper[5003]: I1206 15:50:48.966647 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 15:50:49 crc kubenswrapper[5003]: I1206 15:50:49.719269 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14a9e8ed-58bb-4f3d-969a-76a2328ae4b0" path="/var/lib/kubelet/pods/14a9e8ed-58bb-4f3d-969a-76a2328ae4b0/volumes" Dec 06 15:50:55 crc kubenswrapper[5003]: I1206 15:50:55.011671 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="horizon-kuttl-tests/rabbitmq-server-0" Dec 06 15:50:55 crc kubenswrapper[5003]: I1206 15:50:55.548222 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/keystone-db-sync-s6w7z"] Dec 06 15:50:55 crc kubenswrapper[5003]: E1206 15:50:55.548566 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14a9e8ed-58bb-4f3d-969a-76a2328ae4b0" containerName="registry-server" Dec 06 15:50:55 crc kubenswrapper[5003]: I1206 15:50:55.548582 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="14a9e8ed-58bb-4f3d-969a-76a2328ae4b0" containerName="registry-server" Dec 06 15:50:55 crc kubenswrapper[5003]: I1206 15:50:55.548759 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="14a9e8ed-58bb-4f3d-969a-76a2328ae4b0" containerName="registry-server" Dec 06 15:50:55 crc kubenswrapper[5003]: I1206 15:50:55.549223 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/keystone-db-sync-s6w7z" Dec 06 15:50:55 crc kubenswrapper[5003]: I1206 15:50:55.551265 5003 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone-keystone-dockercfg-4thbk" Dec 06 15:50:55 crc kubenswrapper[5003]: I1206 15:50:55.551770 5003 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone-scripts" Dec 06 15:50:55 crc kubenswrapper[5003]: I1206 15:50:55.552243 5003 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone" Dec 06 15:50:55 crc kubenswrapper[5003]: I1206 15:50:55.553173 5003 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone-config-data" Dec 06 15:50:55 crc kubenswrapper[5003]: I1206 15:50:55.564078 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/keystone-db-sync-s6w7z"] Dec 06 15:50:55 crc kubenswrapper[5003]: I1206 15:50:55.638871 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ttcj4\" (UniqueName: \"kubernetes.io/projected/ea87d520-aeaf-4255-8015-7ce5ef802967-kube-api-access-ttcj4\") pod \"keystone-db-sync-s6w7z\" (UID: \"ea87d520-aeaf-4255-8015-7ce5ef802967\") " pod="horizon-kuttl-tests/keystone-db-sync-s6w7z" Dec 06 15:50:55 crc kubenswrapper[5003]: I1206 15:50:55.638973 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea87d520-aeaf-4255-8015-7ce5ef802967-config-data\") pod \"keystone-db-sync-s6w7z\" (UID: \"ea87d520-aeaf-4255-8015-7ce5ef802967\") " pod="horizon-kuttl-tests/keystone-db-sync-s6w7z" Dec 06 15:50:55 crc kubenswrapper[5003]: I1206 15:50:55.740020 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ttcj4\" (UniqueName: \"kubernetes.io/projected/ea87d520-aeaf-4255-8015-7ce5ef802967-kube-api-access-ttcj4\") pod \"keystone-db-sync-s6w7z\" (UID: \"ea87d520-aeaf-4255-8015-7ce5ef802967\") " pod="horizon-kuttl-tests/keystone-db-sync-s6w7z" Dec 06 15:50:55 crc kubenswrapper[5003]: I1206 15:50:55.740109 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea87d520-aeaf-4255-8015-7ce5ef802967-config-data\") pod \"keystone-db-sync-s6w7z\" (UID: \"ea87d520-aeaf-4255-8015-7ce5ef802967\") " pod="horizon-kuttl-tests/keystone-db-sync-s6w7z" Dec 06 15:50:55 crc kubenswrapper[5003]: I1206 15:50:55.748316 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea87d520-aeaf-4255-8015-7ce5ef802967-config-data\") pod \"keystone-db-sync-s6w7z\" (UID: \"ea87d520-aeaf-4255-8015-7ce5ef802967\") " pod="horizon-kuttl-tests/keystone-db-sync-s6w7z" Dec 06 15:50:55 crc kubenswrapper[5003]: I1206 15:50:55.756248 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ttcj4\" (UniqueName: \"kubernetes.io/projected/ea87d520-aeaf-4255-8015-7ce5ef802967-kube-api-access-ttcj4\") pod \"keystone-db-sync-s6w7z\" (UID: \"ea87d520-aeaf-4255-8015-7ce5ef802967\") " pod="horizon-kuttl-tests/keystone-db-sync-s6w7z" Dec 06 15:50:55 crc kubenswrapper[5003]: I1206 15:50:55.867881 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/keystone-db-sync-s6w7z" Dec 06 15:50:56 crc kubenswrapper[5003]: I1206 15:50:56.275314 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/keystone-db-sync-s6w7z"] Dec 06 15:50:56 crc kubenswrapper[5003]: I1206 15:50:56.278327 5003 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 06 15:50:56 crc kubenswrapper[5003]: I1206 15:50:56.806185 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/horizon-operator-index-mw2lw" Dec 06 15:50:56 crc kubenswrapper[5003]: I1206 15:50:56.806280 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-index-mw2lw" Dec 06 15:50:56 crc kubenswrapper[5003]: I1206 15:50:56.837697 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/horizon-operator-index-mw2lw" Dec 06 15:50:57 crc kubenswrapper[5003]: I1206 15:50:57.060537 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-db-sync-s6w7z" event={"ID":"ea87d520-aeaf-4255-8015-7ce5ef802967","Type":"ContainerStarted","Data":"fc21a6b987a289c2f07e4f5682642ba3729c3ee4056bfe3e423ca82ea4280063"} Dec 06 15:50:57 crc kubenswrapper[5003]: I1206 15:50:57.093465 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-index-mw2lw" Dec 06 15:51:00 crc kubenswrapper[5003]: I1206 15:51:00.704571 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ba5562828bf80d0aea4e250c924daf1f8fc1de13aae41ce98e1a26408344tts"] Dec 06 15:51:00 crc kubenswrapper[5003]: I1206 15:51:00.706071 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ba5562828bf80d0aea4e250c924daf1f8fc1de13aae41ce98e1a26408344tts" Dec 06 15:51:00 crc kubenswrapper[5003]: I1206 15:51:00.708341 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-kqjk5" Dec 06 15:51:00 crc kubenswrapper[5003]: I1206 15:51:00.712672 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ba5562828bf80d0aea4e250c924daf1f8fc1de13aae41ce98e1a26408344tts"] Dec 06 15:51:00 crc kubenswrapper[5003]: I1206 15:51:00.810540 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6-util\") pod \"ba5562828bf80d0aea4e250c924daf1f8fc1de13aae41ce98e1a26408344tts\" (UID: \"d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6\") " pod="openstack-operators/ba5562828bf80d0aea4e250c924daf1f8fc1de13aae41ce98e1a26408344tts" Dec 06 15:51:00 crc kubenswrapper[5003]: I1206 15:51:00.811015 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6-bundle\") pod \"ba5562828bf80d0aea4e250c924daf1f8fc1de13aae41ce98e1a26408344tts\" (UID: \"d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6\") " pod="openstack-operators/ba5562828bf80d0aea4e250c924daf1f8fc1de13aae41ce98e1a26408344tts" Dec 06 15:51:00 crc kubenswrapper[5003]: I1206 15:51:00.811057 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lk88\" (UniqueName: \"kubernetes.io/projected/d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6-kube-api-access-7lk88\") pod \"ba5562828bf80d0aea4e250c924daf1f8fc1de13aae41ce98e1a26408344tts\" (UID: \"d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6\") " pod="openstack-operators/ba5562828bf80d0aea4e250c924daf1f8fc1de13aae41ce98e1a26408344tts" Dec 06 15:51:00 crc kubenswrapper[5003]: I1206 15:51:00.911770 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6-bundle\") pod \"ba5562828bf80d0aea4e250c924daf1f8fc1de13aae41ce98e1a26408344tts\" (UID: \"d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6\") " pod="openstack-operators/ba5562828bf80d0aea4e250c924daf1f8fc1de13aae41ce98e1a26408344tts" Dec 06 15:51:00 crc kubenswrapper[5003]: I1206 15:51:00.911824 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lk88\" (UniqueName: \"kubernetes.io/projected/d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6-kube-api-access-7lk88\") pod \"ba5562828bf80d0aea4e250c924daf1f8fc1de13aae41ce98e1a26408344tts\" (UID: \"d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6\") " pod="openstack-operators/ba5562828bf80d0aea4e250c924daf1f8fc1de13aae41ce98e1a26408344tts" Dec 06 15:51:00 crc kubenswrapper[5003]: I1206 15:51:00.911901 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6-util\") pod \"ba5562828bf80d0aea4e250c924daf1f8fc1de13aae41ce98e1a26408344tts\" (UID: \"d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6\") " pod="openstack-operators/ba5562828bf80d0aea4e250c924daf1f8fc1de13aae41ce98e1a26408344tts" Dec 06 15:51:00 crc kubenswrapper[5003]: I1206 15:51:00.912316 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6-bundle\") pod \"ba5562828bf80d0aea4e250c924daf1f8fc1de13aae41ce98e1a26408344tts\" (UID: \"d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6\") " pod="openstack-operators/ba5562828bf80d0aea4e250c924daf1f8fc1de13aae41ce98e1a26408344tts" Dec 06 15:51:00 crc kubenswrapper[5003]: I1206 15:51:00.912332 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6-util\") pod \"ba5562828bf80d0aea4e250c924daf1f8fc1de13aae41ce98e1a26408344tts\" (UID: \"d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6\") " pod="openstack-operators/ba5562828bf80d0aea4e250c924daf1f8fc1de13aae41ce98e1a26408344tts" Dec 06 15:51:00 crc kubenswrapper[5003]: I1206 15:51:00.947009 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7lk88\" (UniqueName: \"kubernetes.io/projected/d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6-kube-api-access-7lk88\") pod \"ba5562828bf80d0aea4e250c924daf1f8fc1de13aae41ce98e1a26408344tts\" (UID: \"d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6\") " pod="openstack-operators/ba5562828bf80d0aea4e250c924daf1f8fc1de13aae41ce98e1a26408344tts" Dec 06 15:51:01 crc kubenswrapper[5003]: I1206 15:51:01.022420 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ba5562828bf80d0aea4e250c924daf1f8fc1de13aae41ce98e1a26408344tts" Dec 06 15:51:04 crc kubenswrapper[5003]: I1206 15:51:04.113689 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-db-sync-s6w7z" event={"ID":"ea87d520-aeaf-4255-8015-7ce5ef802967","Type":"ContainerStarted","Data":"6c4f1bf22a644bb37729dee341fb47738c94559bb939778fac9a7e4e03de14f4"} Dec 06 15:51:04 crc kubenswrapper[5003]: I1206 15:51:04.154428 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="horizon-kuttl-tests/keystone-db-sync-s6w7z" podStartSLOduration=1.622207055 podStartE2EDuration="9.154404998s" podCreationTimestamp="2025-12-06 15:50:55 +0000 UTC" firstStartedPulling="2025-12-06 15:50:56.277913135 +0000 UTC m=+1134.811267556" lastFinishedPulling="2025-12-06 15:51:03.810111098 +0000 UTC m=+1142.343465499" observedRunningTime="2025-12-06 15:51:04.14466038 +0000 UTC m=+1142.678014781" watchObservedRunningTime="2025-12-06 15:51:04.154404998 +0000 UTC m=+1142.687759409" Dec 06 15:51:04 crc kubenswrapper[5003]: I1206 15:51:04.167377 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ba5562828bf80d0aea4e250c924daf1f8fc1de13aae41ce98e1a26408344tts"] Dec 06 15:51:04 crc kubenswrapper[5003]: W1206 15:51:04.170942 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd73504d3_af7e_4ecf_a4a2_75d8b0aa8fa6.slice/crio-148783b5a5eb85f268072a0211a760ae6f35fc635fd3ecc687517c81f1d269b9 WatchSource:0}: Error finding container 148783b5a5eb85f268072a0211a760ae6f35fc635fd3ecc687517c81f1d269b9: Status 404 returned error can't find the container with id 148783b5a5eb85f268072a0211a760ae6f35fc635fd3ecc687517c81f1d269b9 Dec 06 15:51:05 crc kubenswrapper[5003]: I1206 15:51:05.122841 5003 generic.go:334] "Generic (PLEG): container finished" podID="d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6" containerID="b91689da9f4bcbca694055bf7c3acfb1316d57f399775ac2bd45d36cf215f8ab" exitCode=0 Dec 06 15:51:05 crc kubenswrapper[5003]: I1206 15:51:05.124150 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/ba5562828bf80d0aea4e250c924daf1f8fc1de13aae41ce98e1a26408344tts" event={"ID":"d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6","Type":"ContainerDied","Data":"b91689da9f4bcbca694055bf7c3acfb1316d57f399775ac2bd45d36cf215f8ab"} Dec 06 15:51:05 crc kubenswrapper[5003]: I1206 15:51:05.124194 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ba5562828bf80d0aea4e250c924daf1f8fc1de13aae41ce98e1a26408344tts" event={"ID":"d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6","Type":"ContainerStarted","Data":"148783b5a5eb85f268072a0211a760ae6f35fc635fd3ecc687517c81f1d269b9"} Dec 06 15:51:06 crc kubenswrapper[5003]: I1206 15:51:06.130284 5003 generic.go:334] "Generic (PLEG): container finished" podID="d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6" containerID="cea255bff2e324f216b4ed1f552beaf6a971495b16f63648b6799eb625bab92d" exitCode=0 Dec 06 15:51:06 crc kubenswrapper[5003]: I1206 15:51:06.130371 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ba5562828bf80d0aea4e250c924daf1f8fc1de13aae41ce98e1a26408344tts" event={"ID":"d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6","Type":"ContainerDied","Data":"cea255bff2e324f216b4ed1f552beaf6a971495b16f63648b6799eb625bab92d"} Dec 06 15:51:07 crc kubenswrapper[5003]: I1206 15:51:07.139652 5003 generic.go:334] "Generic (PLEG): container finished" podID="d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6" containerID="ddb8e4fb15308ccb985beeda4649b52cc21565b37c13180a954123225c1287d4" exitCode=0 Dec 06 15:51:07 crc kubenswrapper[5003]: I1206 15:51:07.139739 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ba5562828bf80d0aea4e250c924daf1f8fc1de13aae41ce98e1a26408344tts" event={"ID":"d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6","Type":"ContainerDied","Data":"ddb8e4fb15308ccb985beeda4649b52cc21565b37c13180a954123225c1287d4"} Dec 06 15:51:08 crc kubenswrapper[5003]: I1206 15:51:08.150602 5003 generic.go:334] "Generic (PLEG): container finished" podID="ea87d520-aeaf-4255-8015-7ce5ef802967" containerID="6c4f1bf22a644bb37729dee341fb47738c94559bb939778fac9a7e4e03de14f4" exitCode=0 Dec 06 15:51:08 crc kubenswrapper[5003]: I1206 15:51:08.150753 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-db-sync-s6w7z" event={"ID":"ea87d520-aeaf-4255-8015-7ce5ef802967","Type":"ContainerDied","Data":"6c4f1bf22a644bb37729dee341fb47738c94559bb939778fac9a7e4e03de14f4"} Dec 06 15:51:08 crc kubenswrapper[5003]: I1206 15:51:08.429609 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ba5562828bf80d0aea4e250c924daf1f8fc1de13aae41ce98e1a26408344tts" Dec 06 15:51:08 crc kubenswrapper[5003]: I1206 15:51:08.524250 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7lk88\" (UniqueName: \"kubernetes.io/projected/d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6-kube-api-access-7lk88\") pod \"d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6\" (UID: \"d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6\") " Dec 06 15:51:08 crc kubenswrapper[5003]: I1206 15:51:08.524679 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6-bundle\") pod \"d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6\" (UID: \"d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6\") " Dec 06 15:51:08 crc kubenswrapper[5003]: I1206 15:51:08.524920 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6-util\") pod \"d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6\" (UID: \"d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6\") " Dec 06 15:51:08 crc kubenswrapper[5003]: I1206 15:51:08.525337 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6-bundle" (OuterVolumeSpecName: "bundle") pod "d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6" (UID: "d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:51:08 crc kubenswrapper[5003]: I1206 15:51:08.525705 5003 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 15:51:08 crc kubenswrapper[5003]: I1206 15:51:08.530735 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6-kube-api-access-7lk88" (OuterVolumeSpecName: "kube-api-access-7lk88") pod "d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6" (UID: "d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6"). InnerVolumeSpecName "kube-api-access-7lk88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:51:08 crc kubenswrapper[5003]: I1206 15:51:08.539296 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6-util" (OuterVolumeSpecName: "util") pod "d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6" (UID: "d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:51:08 crc kubenswrapper[5003]: I1206 15:51:08.627122 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7lk88\" (UniqueName: \"kubernetes.io/projected/d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6-kube-api-access-7lk88\") on node \"crc\" DevicePath \"\"" Dec 06 15:51:08 crc kubenswrapper[5003]: I1206 15:51:08.627164 5003 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6-util\") on node \"crc\" DevicePath \"\"" Dec 06 15:51:09 crc kubenswrapper[5003]: I1206 15:51:09.162824 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ba5562828bf80d0aea4e250c924daf1f8fc1de13aae41ce98e1a26408344tts" event={"ID":"d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6","Type":"ContainerDied","Data":"148783b5a5eb85f268072a0211a760ae6f35fc635fd3ecc687517c81f1d269b9"} Dec 06 15:51:09 crc kubenswrapper[5003]: I1206 15:51:09.163202 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="148783b5a5eb85f268072a0211a760ae6f35fc635fd3ecc687517c81f1d269b9" Dec 06 15:51:09 crc kubenswrapper[5003]: I1206 15:51:09.162895 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ba5562828bf80d0aea4e250c924daf1f8fc1de13aae41ce98e1a26408344tts" Dec 06 15:51:09 crc kubenswrapper[5003]: I1206 15:51:09.470784 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/keystone-db-sync-s6w7z" Dec 06 15:51:09 crc kubenswrapper[5003]: I1206 15:51:09.641032 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ttcj4\" (UniqueName: \"kubernetes.io/projected/ea87d520-aeaf-4255-8015-7ce5ef802967-kube-api-access-ttcj4\") pod \"ea87d520-aeaf-4255-8015-7ce5ef802967\" (UID: \"ea87d520-aeaf-4255-8015-7ce5ef802967\") " Dec 06 15:51:09 crc kubenswrapper[5003]: I1206 15:51:09.641097 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea87d520-aeaf-4255-8015-7ce5ef802967-config-data\") pod \"ea87d520-aeaf-4255-8015-7ce5ef802967\" (UID: \"ea87d520-aeaf-4255-8015-7ce5ef802967\") " Dec 06 15:51:09 crc kubenswrapper[5003]: I1206 15:51:09.645896 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea87d520-aeaf-4255-8015-7ce5ef802967-kube-api-access-ttcj4" (OuterVolumeSpecName: "kube-api-access-ttcj4") pod "ea87d520-aeaf-4255-8015-7ce5ef802967" (UID: "ea87d520-aeaf-4255-8015-7ce5ef802967"). InnerVolumeSpecName "kube-api-access-ttcj4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:51:09 crc kubenswrapper[5003]: I1206 15:51:09.686983 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea87d520-aeaf-4255-8015-7ce5ef802967-config-data" (OuterVolumeSpecName: "config-data") pod "ea87d520-aeaf-4255-8015-7ce5ef802967" (UID: "ea87d520-aeaf-4255-8015-7ce5ef802967"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:51:09 crc kubenswrapper[5003]: I1206 15:51:09.742909 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ttcj4\" (UniqueName: \"kubernetes.io/projected/ea87d520-aeaf-4255-8015-7ce5ef802967-kube-api-access-ttcj4\") on node \"crc\" DevicePath \"\"" Dec 06 15:51:09 crc kubenswrapper[5003]: I1206 15:51:09.742952 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea87d520-aeaf-4255-8015-7ce5ef802967-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 15:51:10 crc kubenswrapper[5003]: I1206 15:51:10.169557 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-db-sync-s6w7z" event={"ID":"ea87d520-aeaf-4255-8015-7ce5ef802967","Type":"ContainerDied","Data":"fc21a6b987a289c2f07e4f5682642ba3729c3ee4056bfe3e423ca82ea4280063"} Dec 06 15:51:10 crc kubenswrapper[5003]: I1206 15:51:10.169594 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fc21a6b987a289c2f07e4f5682642ba3729c3ee4056bfe3e423ca82ea4280063" Dec 06 15:51:10 crc kubenswrapper[5003]: I1206 15:51:10.170418 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/keystone-db-sync-s6w7z" Dec 06 15:51:10 crc kubenswrapper[5003]: I1206 15:51:10.366930 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/keystone-bootstrap-hvc5l"] Dec 06 15:51:10 crc kubenswrapper[5003]: E1206 15:51:10.367363 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6" containerName="util" Dec 06 15:51:10 crc kubenswrapper[5003]: I1206 15:51:10.367433 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6" containerName="util" Dec 06 15:51:10 crc kubenswrapper[5003]: E1206 15:51:10.367515 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6" containerName="pull" Dec 06 15:51:10 crc kubenswrapper[5003]: I1206 15:51:10.367602 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6" containerName="pull" Dec 06 15:51:10 crc kubenswrapper[5003]: E1206 15:51:10.367696 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6" containerName="extract" Dec 06 15:51:10 crc kubenswrapper[5003]: I1206 15:51:10.367791 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6" containerName="extract" Dec 06 15:51:10 crc kubenswrapper[5003]: E1206 15:51:10.367884 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea87d520-aeaf-4255-8015-7ce5ef802967" containerName="keystone-db-sync" Dec 06 15:51:10 crc kubenswrapper[5003]: I1206 15:51:10.367960 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea87d520-aeaf-4255-8015-7ce5ef802967" containerName="keystone-db-sync" Dec 06 15:51:10 crc kubenswrapper[5003]: I1206 15:51:10.368149 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6" containerName="extract" Dec 06 15:51:10 crc kubenswrapper[5003]: I1206 15:51:10.368222 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea87d520-aeaf-4255-8015-7ce5ef802967" containerName="keystone-db-sync" Dec 06 15:51:10 crc kubenswrapper[5003]: I1206 15:51:10.368816 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/keystone-bootstrap-hvc5l" Dec 06 15:51:10 crc kubenswrapper[5003]: I1206 15:51:10.371612 5003 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone-config-data" Dec 06 15:51:10 crc kubenswrapper[5003]: I1206 15:51:10.371727 5003 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone" Dec 06 15:51:10 crc kubenswrapper[5003]: I1206 15:51:10.371810 5003 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"osp-secret" Dec 06 15:51:10 crc kubenswrapper[5003]: I1206 15:51:10.371911 5003 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone-keystone-dockercfg-4thbk" Dec 06 15:51:10 crc kubenswrapper[5003]: I1206 15:51:10.372005 5003 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone-scripts" Dec 06 15:51:10 crc kubenswrapper[5003]: I1206 15:51:10.390933 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/keystone-bootstrap-hvc5l"] Dec 06 15:51:10 crc kubenswrapper[5003]: I1206 15:51:10.453212 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e76a0dac-849b-4bd7-90f5-f0f4d2a4382d-credential-keys\") pod \"keystone-bootstrap-hvc5l\" (UID: \"e76a0dac-849b-4bd7-90f5-f0f4d2a4382d\") " pod="horizon-kuttl-tests/keystone-bootstrap-hvc5l" Dec 06 15:51:10 crc kubenswrapper[5003]: I1206 15:51:10.453500 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e76a0dac-849b-4bd7-90f5-f0f4d2a4382d-scripts\") pod \"keystone-bootstrap-hvc5l\" (UID: \"e76a0dac-849b-4bd7-90f5-f0f4d2a4382d\") " pod="horizon-kuttl-tests/keystone-bootstrap-hvc5l" Dec 06 15:51:10 crc kubenswrapper[5003]: I1206 15:51:10.453604 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p5jz5\" (UniqueName: \"kubernetes.io/projected/e76a0dac-849b-4bd7-90f5-f0f4d2a4382d-kube-api-access-p5jz5\") pod \"keystone-bootstrap-hvc5l\" (UID: \"e76a0dac-849b-4bd7-90f5-f0f4d2a4382d\") " pod="horizon-kuttl-tests/keystone-bootstrap-hvc5l" Dec 06 15:51:10 crc kubenswrapper[5003]: I1206 15:51:10.453773 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e76a0dac-849b-4bd7-90f5-f0f4d2a4382d-fernet-keys\") pod \"keystone-bootstrap-hvc5l\" (UID: \"e76a0dac-849b-4bd7-90f5-f0f4d2a4382d\") " pod="horizon-kuttl-tests/keystone-bootstrap-hvc5l" Dec 06 15:51:10 crc kubenswrapper[5003]: I1206 15:51:10.453848 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e76a0dac-849b-4bd7-90f5-f0f4d2a4382d-config-data\") pod \"keystone-bootstrap-hvc5l\" (UID: \"e76a0dac-849b-4bd7-90f5-f0f4d2a4382d\") " pod="horizon-kuttl-tests/keystone-bootstrap-hvc5l" Dec 06 15:51:10 crc kubenswrapper[5003]: I1206 15:51:10.554667 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p5jz5\" (UniqueName: \"kubernetes.io/projected/e76a0dac-849b-4bd7-90f5-f0f4d2a4382d-kube-api-access-p5jz5\") pod \"keystone-bootstrap-hvc5l\" (UID: \"e76a0dac-849b-4bd7-90f5-f0f4d2a4382d\") " pod="horizon-kuttl-tests/keystone-bootstrap-hvc5l" Dec 06 
15:51:10 crc kubenswrapper[5003]: I1206 15:51:10.554751 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e76a0dac-849b-4bd7-90f5-f0f4d2a4382d-fernet-keys\") pod \"keystone-bootstrap-hvc5l\" (UID: \"e76a0dac-849b-4bd7-90f5-f0f4d2a4382d\") " pod="horizon-kuttl-tests/keystone-bootstrap-hvc5l" Dec 06 15:51:10 crc kubenswrapper[5003]: I1206 15:51:10.554787 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e76a0dac-849b-4bd7-90f5-f0f4d2a4382d-config-data\") pod \"keystone-bootstrap-hvc5l\" (UID: \"e76a0dac-849b-4bd7-90f5-f0f4d2a4382d\") " pod="horizon-kuttl-tests/keystone-bootstrap-hvc5l" Dec 06 15:51:10 crc kubenswrapper[5003]: I1206 15:51:10.554838 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e76a0dac-849b-4bd7-90f5-f0f4d2a4382d-credential-keys\") pod \"keystone-bootstrap-hvc5l\" (UID: \"e76a0dac-849b-4bd7-90f5-f0f4d2a4382d\") " pod="horizon-kuttl-tests/keystone-bootstrap-hvc5l" Dec 06 15:51:10 crc kubenswrapper[5003]: I1206 15:51:10.554873 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e76a0dac-849b-4bd7-90f5-f0f4d2a4382d-scripts\") pod \"keystone-bootstrap-hvc5l\" (UID: \"e76a0dac-849b-4bd7-90f5-f0f4d2a4382d\") " pod="horizon-kuttl-tests/keystone-bootstrap-hvc5l" Dec 06 15:51:10 crc kubenswrapper[5003]: I1206 15:51:10.558183 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e76a0dac-849b-4bd7-90f5-f0f4d2a4382d-scripts\") pod \"keystone-bootstrap-hvc5l\" (UID: \"e76a0dac-849b-4bd7-90f5-f0f4d2a4382d\") " pod="horizon-kuttl-tests/keystone-bootstrap-hvc5l" Dec 06 15:51:10 crc kubenswrapper[5003]: I1206 15:51:10.558452 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e76a0dac-849b-4bd7-90f5-f0f4d2a4382d-fernet-keys\") pod \"keystone-bootstrap-hvc5l\" (UID: \"e76a0dac-849b-4bd7-90f5-f0f4d2a4382d\") " pod="horizon-kuttl-tests/keystone-bootstrap-hvc5l" Dec 06 15:51:10 crc kubenswrapper[5003]: I1206 15:51:10.559216 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e76a0dac-849b-4bd7-90f5-f0f4d2a4382d-credential-keys\") pod \"keystone-bootstrap-hvc5l\" (UID: \"e76a0dac-849b-4bd7-90f5-f0f4d2a4382d\") " pod="horizon-kuttl-tests/keystone-bootstrap-hvc5l" Dec 06 15:51:10 crc kubenswrapper[5003]: I1206 15:51:10.560373 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e76a0dac-849b-4bd7-90f5-f0f4d2a4382d-config-data\") pod \"keystone-bootstrap-hvc5l\" (UID: \"e76a0dac-849b-4bd7-90f5-f0f4d2a4382d\") " pod="horizon-kuttl-tests/keystone-bootstrap-hvc5l" Dec 06 15:51:10 crc kubenswrapper[5003]: I1206 15:51:10.576461 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p5jz5\" (UniqueName: \"kubernetes.io/projected/e76a0dac-849b-4bd7-90f5-f0f4d2a4382d-kube-api-access-p5jz5\") pod \"keystone-bootstrap-hvc5l\" (UID: \"e76a0dac-849b-4bd7-90f5-f0f4d2a4382d\") " pod="horizon-kuttl-tests/keystone-bootstrap-hvc5l" Dec 06 15:51:10 crc kubenswrapper[5003]: I1206 15:51:10.684020 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/keystone-bootstrap-hvc5l" Dec 06 15:51:11 crc kubenswrapper[5003]: I1206 15:51:11.149065 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/keystone-bootstrap-hvc5l"] Dec 06 15:51:11 crc kubenswrapper[5003]: W1206 15:51:11.156753 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode76a0dac_849b_4bd7_90f5_f0f4d2a4382d.slice/crio-61408681c38ffefe87b4dcd169b5d1b71bd49956514e9e39254f1ba7c8295cd0 WatchSource:0}: Error finding container 61408681c38ffefe87b4dcd169b5d1b71bd49956514e9e39254f1ba7c8295cd0: Status 404 returned error can't find the container with id 61408681c38ffefe87b4dcd169b5d1b71bd49956514e9e39254f1ba7c8295cd0 Dec 06 15:51:11 crc kubenswrapper[5003]: I1206 15:51:11.181563 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-bootstrap-hvc5l" event={"ID":"e76a0dac-849b-4bd7-90f5-f0f4d2a4382d","Type":"ContainerStarted","Data":"61408681c38ffefe87b4dcd169b5d1b71bd49956514e9e39254f1ba7c8295cd0"} Dec 06 15:51:12 crc kubenswrapper[5003]: I1206 15:51:12.190607 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-bootstrap-hvc5l" event={"ID":"e76a0dac-849b-4bd7-90f5-f0f4d2a4382d","Type":"ContainerStarted","Data":"1cf3e769f745333d6d3a40fd17567e83131f6f0403cd83b3b8f021350130ad47"} Dec 06 15:51:12 crc kubenswrapper[5003]: I1206 15:51:12.233866 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="horizon-kuttl-tests/keystone-bootstrap-hvc5l" podStartSLOduration=2.233842928 podStartE2EDuration="2.233842928s" podCreationTimestamp="2025-12-06 15:51:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:51:12.210820035 +0000 UTC m=+1150.744174506" watchObservedRunningTime="2025-12-06 15:51:12.233842928 +0000 UTC m=+1150.767197309" Dec 06 15:51:14 crc kubenswrapper[5003]: I1206 15:51:14.204020 5003 generic.go:334] "Generic (PLEG): container finished" podID="e76a0dac-849b-4bd7-90f5-f0f4d2a4382d" containerID="1cf3e769f745333d6d3a40fd17567e83131f6f0403cd83b3b8f021350130ad47" exitCode=0 Dec 06 15:51:14 crc kubenswrapper[5003]: I1206 15:51:14.204103 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-bootstrap-hvc5l" event={"ID":"e76a0dac-849b-4bd7-90f5-f0f4d2a4382d","Type":"ContainerDied","Data":"1cf3e769f745333d6d3a40fd17567e83131f6f0403cd83b3b8f021350130ad47"} Dec 06 15:51:15 crc kubenswrapper[5003]: I1206 15:51:15.520297 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/keystone-bootstrap-hvc5l" Dec 06 15:51:15 crc kubenswrapper[5003]: I1206 15:51:15.628585 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e76a0dac-849b-4bd7-90f5-f0f4d2a4382d-scripts\") pod \"e76a0dac-849b-4bd7-90f5-f0f4d2a4382d\" (UID: \"e76a0dac-849b-4bd7-90f5-f0f4d2a4382d\") " Dec 06 15:51:15 crc kubenswrapper[5003]: I1206 15:51:15.628690 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e76a0dac-849b-4bd7-90f5-f0f4d2a4382d-fernet-keys\") pod \"e76a0dac-849b-4bd7-90f5-f0f4d2a4382d\" (UID: \"e76a0dac-849b-4bd7-90f5-f0f4d2a4382d\") " Dec 06 15:51:15 crc kubenswrapper[5003]: I1206 15:51:15.628756 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p5jz5\" (UniqueName: \"kubernetes.io/projected/e76a0dac-849b-4bd7-90f5-f0f4d2a4382d-kube-api-access-p5jz5\") pod \"e76a0dac-849b-4bd7-90f5-f0f4d2a4382d\" (UID: \"e76a0dac-849b-4bd7-90f5-f0f4d2a4382d\") " Dec 06 15:51:15 crc kubenswrapper[5003]: I1206 15:51:15.628776 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e76a0dac-849b-4bd7-90f5-f0f4d2a4382d-config-data\") pod \"e76a0dac-849b-4bd7-90f5-f0f4d2a4382d\" (UID: \"e76a0dac-849b-4bd7-90f5-f0f4d2a4382d\") " Dec 06 15:51:15 crc kubenswrapper[5003]: I1206 15:51:15.628845 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e76a0dac-849b-4bd7-90f5-f0f4d2a4382d-credential-keys\") pod \"e76a0dac-849b-4bd7-90f5-f0f4d2a4382d\" (UID: \"e76a0dac-849b-4bd7-90f5-f0f4d2a4382d\") " Dec 06 15:51:15 crc kubenswrapper[5003]: I1206 15:51:15.637753 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e76a0dac-849b-4bd7-90f5-f0f4d2a4382d-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "e76a0dac-849b-4bd7-90f5-f0f4d2a4382d" (UID: "e76a0dac-849b-4bd7-90f5-f0f4d2a4382d"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:51:15 crc kubenswrapper[5003]: I1206 15:51:15.638651 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e76a0dac-849b-4bd7-90f5-f0f4d2a4382d-scripts" (OuterVolumeSpecName: "scripts") pod "e76a0dac-849b-4bd7-90f5-f0f4d2a4382d" (UID: "e76a0dac-849b-4bd7-90f5-f0f4d2a4382d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:51:15 crc kubenswrapper[5003]: I1206 15:51:15.641627 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e76a0dac-849b-4bd7-90f5-f0f4d2a4382d-kube-api-access-p5jz5" (OuterVolumeSpecName: "kube-api-access-p5jz5") pod "e76a0dac-849b-4bd7-90f5-f0f4d2a4382d" (UID: "e76a0dac-849b-4bd7-90f5-f0f4d2a4382d"). InnerVolumeSpecName "kube-api-access-p5jz5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:51:15 crc kubenswrapper[5003]: I1206 15:51:15.642356 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e76a0dac-849b-4bd7-90f5-f0f4d2a4382d-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "e76a0dac-849b-4bd7-90f5-f0f4d2a4382d" (UID: "e76a0dac-849b-4bd7-90f5-f0f4d2a4382d"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:51:15 crc kubenswrapper[5003]: I1206 15:51:15.650627 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e76a0dac-849b-4bd7-90f5-f0f4d2a4382d-config-data" (OuterVolumeSpecName: "config-data") pod "e76a0dac-849b-4bd7-90f5-f0f4d2a4382d" (UID: "e76a0dac-849b-4bd7-90f5-f0f4d2a4382d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:51:15 crc kubenswrapper[5003]: I1206 15:51:15.730646 5003 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e76a0dac-849b-4bd7-90f5-f0f4d2a4382d-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 06 15:51:15 crc kubenswrapper[5003]: I1206 15:51:15.730682 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e76a0dac-849b-4bd7-90f5-f0f4d2a4382d-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 15:51:15 crc kubenswrapper[5003]: I1206 15:51:15.730692 5003 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e76a0dac-849b-4bd7-90f5-f0f4d2a4382d-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 06 15:51:15 crc kubenswrapper[5003]: I1206 15:51:15.730701 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p5jz5\" (UniqueName: \"kubernetes.io/projected/e76a0dac-849b-4bd7-90f5-f0f4d2a4382d-kube-api-access-p5jz5\") on node \"crc\" DevicePath \"\"" Dec 06 15:51:15 crc kubenswrapper[5003]: I1206 15:51:15.730712 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e76a0dac-849b-4bd7-90f5-f0f4d2a4382d-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 15:51:16 crc kubenswrapper[5003]: I1206 15:51:16.222799 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-bootstrap-hvc5l" event={"ID":"e76a0dac-849b-4bd7-90f5-f0f4d2a4382d","Type":"ContainerDied","Data":"61408681c38ffefe87b4dcd169b5d1b71bd49956514e9e39254f1ba7c8295cd0"} Dec 06 15:51:16 crc kubenswrapper[5003]: I1206 15:51:16.222865 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="61408681c38ffefe87b4dcd169b5d1b71bd49956514e9e39254f1ba7c8295cd0" Dec 06 15:51:16 crc kubenswrapper[5003]: I1206 15:51:16.222974 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/keystone-bootstrap-hvc5l" Dec 06 15:51:16 crc kubenswrapper[5003]: I1206 15:51:16.298364 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/keystone-bd6cbdc78-tjc97"] Dec 06 15:51:16 crc kubenswrapper[5003]: E1206 15:51:16.298627 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e76a0dac-849b-4bd7-90f5-f0f4d2a4382d" containerName="keystone-bootstrap" Dec 06 15:51:16 crc kubenswrapper[5003]: I1206 15:51:16.298646 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e76a0dac-849b-4bd7-90f5-f0f4d2a4382d" containerName="keystone-bootstrap" Dec 06 15:51:16 crc kubenswrapper[5003]: I1206 15:51:16.298767 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e76a0dac-849b-4bd7-90f5-f0f4d2a4382d" containerName="keystone-bootstrap" Dec 06 15:51:16 crc kubenswrapper[5003]: I1206 15:51:16.299189 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/keystone-bd6cbdc78-tjc97" Dec 06 15:51:16 crc kubenswrapper[5003]: I1206 15:51:16.301567 5003 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone-scripts" Dec 06 15:51:16 crc kubenswrapper[5003]: I1206 15:51:16.301596 5003 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone" Dec 06 15:51:16 crc kubenswrapper[5003]: I1206 15:51:16.302702 5003 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone-keystone-dockercfg-4thbk" Dec 06 15:51:16 crc kubenswrapper[5003]: I1206 15:51:16.302866 5003 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone-config-data" Dec 06 15:51:16 crc kubenswrapper[5003]: I1206 15:51:16.308161 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/keystone-bd6cbdc78-tjc97"] Dec 06 15:51:16 crc kubenswrapper[5003]: I1206 15:51:16.440071 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1e075c35-aaca-468e-9276-0ce9bcb6a394-credential-keys\") pod \"keystone-bd6cbdc78-tjc97\" (UID: \"1e075c35-aaca-468e-9276-0ce9bcb6a394\") " pod="horizon-kuttl-tests/keystone-bd6cbdc78-tjc97" Dec 06 15:51:16 crc kubenswrapper[5003]: I1206 15:51:16.440306 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1e075c35-aaca-468e-9276-0ce9bcb6a394-fernet-keys\") pod \"keystone-bd6cbdc78-tjc97\" (UID: \"1e075c35-aaca-468e-9276-0ce9bcb6a394\") " pod="horizon-kuttl-tests/keystone-bd6cbdc78-tjc97" Dec 06 15:51:16 crc kubenswrapper[5003]: I1206 15:51:16.440355 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1e075c35-aaca-468e-9276-0ce9bcb6a394-scripts\") pod \"keystone-bd6cbdc78-tjc97\" (UID: \"1e075c35-aaca-468e-9276-0ce9bcb6a394\") " pod="horizon-kuttl-tests/keystone-bd6cbdc78-tjc97" Dec 06 15:51:16 crc kubenswrapper[5003]: I1206 15:51:16.440397 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tx6f2\" (UniqueName: \"kubernetes.io/projected/1e075c35-aaca-468e-9276-0ce9bcb6a394-kube-api-access-tx6f2\") pod \"keystone-bd6cbdc78-tjc97\" (UID: \"1e075c35-aaca-468e-9276-0ce9bcb6a394\") " pod="horizon-kuttl-tests/keystone-bd6cbdc78-tjc97" Dec 06 15:51:16 crc kubenswrapper[5003]: I1206 15:51:16.440642 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e075c35-aaca-468e-9276-0ce9bcb6a394-config-data\") pod \"keystone-bd6cbdc78-tjc97\" (UID: \"1e075c35-aaca-468e-9276-0ce9bcb6a394\") " pod="horizon-kuttl-tests/keystone-bd6cbdc78-tjc97" Dec 06 15:51:16 crc kubenswrapper[5003]: I1206 15:51:16.541920 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1e075c35-aaca-468e-9276-0ce9bcb6a394-credential-keys\") pod \"keystone-bd6cbdc78-tjc97\" (UID: \"1e075c35-aaca-468e-9276-0ce9bcb6a394\") " pod="horizon-kuttl-tests/keystone-bd6cbdc78-tjc97" Dec 06 15:51:16 crc kubenswrapper[5003]: I1206 15:51:16.542284 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: 
\"kubernetes.io/secret/1e075c35-aaca-468e-9276-0ce9bcb6a394-fernet-keys\") pod \"keystone-bd6cbdc78-tjc97\" (UID: \"1e075c35-aaca-468e-9276-0ce9bcb6a394\") " pod="horizon-kuttl-tests/keystone-bd6cbdc78-tjc97" Dec 06 15:51:16 crc kubenswrapper[5003]: I1206 15:51:16.542303 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1e075c35-aaca-468e-9276-0ce9bcb6a394-scripts\") pod \"keystone-bd6cbdc78-tjc97\" (UID: \"1e075c35-aaca-468e-9276-0ce9bcb6a394\") " pod="horizon-kuttl-tests/keystone-bd6cbdc78-tjc97" Dec 06 15:51:16 crc kubenswrapper[5003]: I1206 15:51:16.542331 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tx6f2\" (UniqueName: \"kubernetes.io/projected/1e075c35-aaca-468e-9276-0ce9bcb6a394-kube-api-access-tx6f2\") pod \"keystone-bd6cbdc78-tjc97\" (UID: \"1e075c35-aaca-468e-9276-0ce9bcb6a394\") " pod="horizon-kuttl-tests/keystone-bd6cbdc78-tjc97" Dec 06 15:51:16 crc kubenswrapper[5003]: I1206 15:51:16.542375 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e075c35-aaca-468e-9276-0ce9bcb6a394-config-data\") pod \"keystone-bd6cbdc78-tjc97\" (UID: \"1e075c35-aaca-468e-9276-0ce9bcb6a394\") " pod="horizon-kuttl-tests/keystone-bd6cbdc78-tjc97" Dec 06 15:51:16 crc kubenswrapper[5003]: I1206 15:51:16.546932 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1e075c35-aaca-468e-9276-0ce9bcb6a394-credential-keys\") pod \"keystone-bd6cbdc78-tjc97\" (UID: \"1e075c35-aaca-468e-9276-0ce9bcb6a394\") " pod="horizon-kuttl-tests/keystone-bd6cbdc78-tjc97" Dec 06 15:51:16 crc kubenswrapper[5003]: I1206 15:51:16.547095 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e075c35-aaca-468e-9276-0ce9bcb6a394-config-data\") pod \"keystone-bd6cbdc78-tjc97\" (UID: \"1e075c35-aaca-468e-9276-0ce9bcb6a394\") " pod="horizon-kuttl-tests/keystone-bd6cbdc78-tjc97" Dec 06 15:51:16 crc kubenswrapper[5003]: I1206 15:51:16.548980 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1e075c35-aaca-468e-9276-0ce9bcb6a394-fernet-keys\") pod \"keystone-bd6cbdc78-tjc97\" (UID: \"1e075c35-aaca-468e-9276-0ce9bcb6a394\") " pod="horizon-kuttl-tests/keystone-bd6cbdc78-tjc97" Dec 06 15:51:16 crc kubenswrapper[5003]: I1206 15:51:16.552985 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1e075c35-aaca-468e-9276-0ce9bcb6a394-scripts\") pod \"keystone-bd6cbdc78-tjc97\" (UID: \"1e075c35-aaca-468e-9276-0ce9bcb6a394\") " pod="horizon-kuttl-tests/keystone-bd6cbdc78-tjc97" Dec 06 15:51:16 crc kubenswrapper[5003]: I1206 15:51:16.576008 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tx6f2\" (UniqueName: \"kubernetes.io/projected/1e075c35-aaca-468e-9276-0ce9bcb6a394-kube-api-access-tx6f2\") pod \"keystone-bd6cbdc78-tjc97\" (UID: \"1e075c35-aaca-468e-9276-0ce9bcb6a394\") " pod="horizon-kuttl-tests/keystone-bd6cbdc78-tjc97" Dec 06 15:51:16 crc kubenswrapper[5003]: I1206 15:51:16.615717 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/keystone-bd6cbdc78-tjc97" Dec 06 15:51:17 crc kubenswrapper[5003]: I1206 15:51:17.093125 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/keystone-bd6cbdc78-tjc97"] Dec 06 15:51:17 crc kubenswrapper[5003]: I1206 15:51:17.237717 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-bd6cbdc78-tjc97" event={"ID":"1e075c35-aaca-468e-9276-0ce9bcb6a394","Type":"ContainerStarted","Data":"766e91bc451a7beb901ee88a94c9002cc32ed0b52fab48f3a5ee3c89a8fb5ac4"} Dec 06 15:51:18 crc kubenswrapper[5003]: I1206 15:51:18.245334 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-bd6cbdc78-tjc97" event={"ID":"1e075c35-aaca-468e-9276-0ce9bcb6a394","Type":"ContainerStarted","Data":"a30cc65566f336c0c5a6b7bfe8ed6f2360be9b5ffd9ccbcd57ef0af0d7ef0b91"} Dec 06 15:51:18 crc kubenswrapper[5003]: I1206 15:51:18.247974 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="horizon-kuttl-tests/keystone-bd6cbdc78-tjc97" Dec 06 15:51:18 crc kubenswrapper[5003]: I1206 15:51:18.262805 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="horizon-kuttl-tests/keystone-bd6cbdc78-tjc97" podStartSLOduration=2.26278774 podStartE2EDuration="2.26278774s" podCreationTimestamp="2025-12-06 15:51:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:51:18.259030407 +0000 UTC m=+1156.792384788" watchObservedRunningTime="2025-12-06 15:51:18.26278774 +0000 UTC m=+1156.796142121" Dec 06 15:51:18 crc kubenswrapper[5003]: I1206 15:51:18.572645 5003 patch_prober.go:28] interesting pod/machine-config-daemon-w25db container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 15:51:18 crc kubenswrapper[5003]: I1206 15:51:18.572697 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 15:51:18 crc kubenswrapper[5003]: I1206 15:51:18.572743 5003 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w25db" Dec 06 15:51:18 crc kubenswrapper[5003]: I1206 15:51:18.573332 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"78b1e361c0889fb22d06542ab25b57331309a42111ebfeb58f0849e826b8ef88"} pod="openshift-machine-config-operator/machine-config-daemon-w25db" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 06 15:51:18 crc kubenswrapper[5003]: I1206 15:51:18.573385 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" containerName="machine-config-daemon" containerID="cri-o://78b1e361c0889fb22d06542ab25b57331309a42111ebfeb58f0849e826b8ef88" gracePeriod=600 Dec 06 15:51:18 crc kubenswrapper[5003]: I1206 15:51:18.758314 5003 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack-operators/horizon-operator-controller-manager-5d45bd77f6-vsgls"] Dec 06 15:51:18 crc kubenswrapper[5003]: I1206 15:51:18.759248 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-5d45bd77f6-vsgls" Dec 06 15:51:18 crc kubenswrapper[5003]: I1206 15:51:18.761148 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-lfz2m" Dec 06 15:51:18 crc kubenswrapper[5003]: I1206 15:51:18.761341 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-service-cert" Dec 06 15:51:18 crc kubenswrapper[5003]: I1206 15:51:18.778916 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5d45bd77f6-vsgls"] Dec 06 15:51:18 crc kubenswrapper[5003]: I1206 15:51:18.873222 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/eada1e4a-eb19-4b1e-868d-31d913d7b85e-webhook-cert\") pod \"horizon-operator-controller-manager-5d45bd77f6-vsgls\" (UID: \"eada1e4a-eb19-4b1e-868d-31d913d7b85e\") " pod="openstack-operators/horizon-operator-controller-manager-5d45bd77f6-vsgls" Dec 06 15:51:18 crc kubenswrapper[5003]: I1206 15:51:18.873281 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8mxdl\" (UniqueName: \"kubernetes.io/projected/eada1e4a-eb19-4b1e-868d-31d913d7b85e-kube-api-access-8mxdl\") pod \"horizon-operator-controller-manager-5d45bd77f6-vsgls\" (UID: \"eada1e4a-eb19-4b1e-868d-31d913d7b85e\") " pod="openstack-operators/horizon-operator-controller-manager-5d45bd77f6-vsgls" Dec 06 15:51:18 crc kubenswrapper[5003]: I1206 15:51:18.873304 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/eada1e4a-eb19-4b1e-868d-31d913d7b85e-apiservice-cert\") pod \"horizon-operator-controller-manager-5d45bd77f6-vsgls\" (UID: \"eada1e4a-eb19-4b1e-868d-31d913d7b85e\") " pod="openstack-operators/horizon-operator-controller-manager-5d45bd77f6-vsgls" Dec 06 15:51:18 crc kubenswrapper[5003]: I1206 15:51:18.974829 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/eada1e4a-eb19-4b1e-868d-31d913d7b85e-webhook-cert\") pod \"horizon-operator-controller-manager-5d45bd77f6-vsgls\" (UID: \"eada1e4a-eb19-4b1e-868d-31d913d7b85e\") " pod="openstack-operators/horizon-operator-controller-manager-5d45bd77f6-vsgls" Dec 06 15:51:18 crc kubenswrapper[5003]: I1206 15:51:18.974901 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8mxdl\" (UniqueName: \"kubernetes.io/projected/eada1e4a-eb19-4b1e-868d-31d913d7b85e-kube-api-access-8mxdl\") pod \"horizon-operator-controller-manager-5d45bd77f6-vsgls\" (UID: \"eada1e4a-eb19-4b1e-868d-31d913d7b85e\") " pod="openstack-operators/horizon-operator-controller-manager-5d45bd77f6-vsgls" Dec 06 15:51:18 crc kubenswrapper[5003]: I1206 15:51:18.974929 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/eada1e4a-eb19-4b1e-868d-31d913d7b85e-apiservice-cert\") pod \"horizon-operator-controller-manager-5d45bd77f6-vsgls\" (UID: 
\"eada1e4a-eb19-4b1e-868d-31d913d7b85e\") " pod="openstack-operators/horizon-operator-controller-manager-5d45bd77f6-vsgls" Dec 06 15:51:18 crc kubenswrapper[5003]: I1206 15:51:18.992902 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/eada1e4a-eb19-4b1e-868d-31d913d7b85e-webhook-cert\") pod \"horizon-operator-controller-manager-5d45bd77f6-vsgls\" (UID: \"eada1e4a-eb19-4b1e-868d-31d913d7b85e\") " pod="openstack-operators/horizon-operator-controller-manager-5d45bd77f6-vsgls" Dec 06 15:51:18 crc kubenswrapper[5003]: I1206 15:51:18.993600 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/eada1e4a-eb19-4b1e-868d-31d913d7b85e-apiservice-cert\") pod \"horizon-operator-controller-manager-5d45bd77f6-vsgls\" (UID: \"eada1e4a-eb19-4b1e-868d-31d913d7b85e\") " pod="openstack-operators/horizon-operator-controller-manager-5d45bd77f6-vsgls" Dec 06 15:51:19 crc kubenswrapper[5003]: I1206 15:51:19.003663 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8mxdl\" (UniqueName: \"kubernetes.io/projected/eada1e4a-eb19-4b1e-868d-31d913d7b85e-kube-api-access-8mxdl\") pod \"horizon-operator-controller-manager-5d45bd77f6-vsgls\" (UID: \"eada1e4a-eb19-4b1e-868d-31d913d7b85e\") " pod="openstack-operators/horizon-operator-controller-manager-5d45bd77f6-vsgls" Dec 06 15:51:19 crc kubenswrapper[5003]: I1206 15:51:19.088098 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-5d45bd77f6-vsgls" Dec 06 15:51:19 crc kubenswrapper[5003]: I1206 15:51:19.255364 5003 generic.go:334] "Generic (PLEG): container finished" podID="1a047c4d-003e-4668-9b96-945eab34ab68" containerID="78b1e361c0889fb22d06542ab25b57331309a42111ebfeb58f0849e826b8ef88" exitCode=0 Dec 06 15:51:19 crc kubenswrapper[5003]: I1206 15:51:19.255714 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" event={"ID":"1a047c4d-003e-4668-9b96-945eab34ab68","Type":"ContainerDied","Data":"78b1e361c0889fb22d06542ab25b57331309a42111ebfeb58f0849e826b8ef88"} Dec 06 15:51:19 crc kubenswrapper[5003]: I1206 15:51:19.255796 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" event={"ID":"1a047c4d-003e-4668-9b96-945eab34ab68","Type":"ContainerStarted","Data":"fda5b3a83c2db0cd7e8cd10cafa27a87f57daf7b92848cde69fdb9048350b316"} Dec 06 15:51:19 crc kubenswrapper[5003]: I1206 15:51:19.255823 5003 scope.go:117] "RemoveContainer" containerID="bf4c0e939e0839bd8579c450bf673f46cb54e6312b28fa28edd3fa3c1fe6713b" Dec 06 15:51:19 crc kubenswrapper[5003]: I1206 15:51:19.545173 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5d45bd77f6-vsgls"] Dec 06 15:51:19 crc kubenswrapper[5003]: W1206 15:51:19.551169 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeada1e4a_eb19_4b1e_868d_31d913d7b85e.slice/crio-265af085402c7fdc21be5171e44306de446435758f000bbae65c4a7118659b94 WatchSource:0}: Error finding container 265af085402c7fdc21be5171e44306de446435758f000bbae65c4a7118659b94: Status 404 returned error can't find the container with id 265af085402c7fdc21be5171e44306de446435758f000bbae65c4a7118659b94 Dec 06 15:51:20 crc kubenswrapper[5003]: 
I1206 15:51:20.288949 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-5d45bd77f6-vsgls" event={"ID":"eada1e4a-eb19-4b1e-868d-31d913d7b85e","Type":"ContainerStarted","Data":"265af085402c7fdc21be5171e44306de446435758f000bbae65c4a7118659b94"} Dec 06 15:51:23 crc kubenswrapper[5003]: I1206 15:51:23.334691 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-5d45bd77f6-vsgls" event={"ID":"eada1e4a-eb19-4b1e-868d-31d913d7b85e","Type":"ContainerStarted","Data":"ddf6e0e2d9b57aabeef3242ccfcfb1a1c3ab1d2bb8eae8d911945e410c7e98ce"} Dec 06 15:51:23 crc kubenswrapper[5003]: I1206 15:51:23.335608 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-5d45bd77f6-vsgls" Dec 06 15:51:23 crc kubenswrapper[5003]: I1206 15:51:23.369725 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-5d45bd77f6-vsgls" podStartSLOduration=2.704954104 podStartE2EDuration="5.369702715s" podCreationTimestamp="2025-12-06 15:51:18 +0000 UTC" firstStartedPulling="2025-12-06 15:51:19.553814018 +0000 UTC m=+1158.087168399" lastFinishedPulling="2025-12-06 15:51:22.218562629 +0000 UTC m=+1160.751917010" observedRunningTime="2025-12-06 15:51:23.364037399 +0000 UTC m=+1161.897391790" watchObservedRunningTime="2025-12-06 15:51:23.369702715 +0000 UTC m=+1161.903057086" Dec 06 15:51:29 crc kubenswrapper[5003]: I1206 15:51:29.092826 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-5d45bd77f6-vsgls" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.091686 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/horizon-6675bd755-2tpjz"] Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.096690 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/horizon-6675bd755-2tpjz" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.100271 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"horizon-kuttl-tests"/"horizon-scripts" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.100602 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"horizon-kuttl-tests"/"horizon-config-data" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.101735 5003 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"horizon" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.101735 5003 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"horizon-horizon-dockercfg-2b6r2" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.103766 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/horizon-6675bd755-2tpjz"] Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.172734 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/horizon-8bb8556c5-r5b4t"] Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.174209 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/horizon-8bb8556c5-r5b4t" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.187611 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/horizon-8bb8556c5-r5b4t"] Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.229984 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7b85f3e6-54eb-4b5a-8f44-6614366478c2-scripts\") pod \"horizon-6675bd755-2tpjz\" (UID: \"7b85f3e6-54eb-4b5a-8f44-6614366478c2\") " pod="horizon-kuttl-tests/horizon-6675bd755-2tpjz" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.230035 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4-scripts\") pod \"horizon-8bb8556c5-r5b4t\" (UID: \"d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4\") " pod="horizon-kuttl-tests/horizon-8bb8556c5-r5b4t" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.230060 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ngpx8\" (UniqueName: \"kubernetes.io/projected/7b85f3e6-54eb-4b5a-8f44-6614366478c2-kube-api-access-ngpx8\") pod \"horizon-6675bd755-2tpjz\" (UID: \"7b85f3e6-54eb-4b5a-8f44-6614366478c2\") " pod="horizon-kuttl-tests/horizon-6675bd755-2tpjz" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.230081 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4-config-data\") pod \"horizon-8bb8556c5-r5b4t\" (UID: \"d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4\") " pod="horizon-kuttl-tests/horizon-8bb8556c5-r5b4t" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.230111 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ls5rn\" (UniqueName: \"kubernetes.io/projected/d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4-kube-api-access-ls5rn\") pod \"horizon-8bb8556c5-r5b4t\" (UID: \"d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4\") " pod="horizon-kuttl-tests/horizon-8bb8556c5-r5b4t" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.230134 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7b85f3e6-54eb-4b5a-8f44-6614366478c2-horizon-secret-key\") pod \"horizon-6675bd755-2tpjz\" (UID: \"7b85f3e6-54eb-4b5a-8f44-6614366478c2\") " pod="horizon-kuttl-tests/horizon-6675bd755-2tpjz" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.230188 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4-horizon-secret-key\") pod \"horizon-8bb8556c5-r5b4t\" (UID: \"d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4\") " pod="horizon-kuttl-tests/horizon-8bb8556c5-r5b4t" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.230204 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b85f3e6-54eb-4b5a-8f44-6614366478c2-logs\") pod \"horizon-6675bd755-2tpjz\" (UID: \"7b85f3e6-54eb-4b5a-8f44-6614366478c2\") " pod="horizon-kuttl-tests/horizon-6675bd755-2tpjz" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.230227 
5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7b85f3e6-54eb-4b5a-8f44-6614366478c2-config-data\") pod \"horizon-6675bd755-2tpjz\" (UID: \"7b85f3e6-54eb-4b5a-8f44-6614366478c2\") " pod="horizon-kuttl-tests/horizon-6675bd755-2tpjz" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.230248 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4-logs\") pod \"horizon-8bb8556c5-r5b4t\" (UID: \"d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4\") " pod="horizon-kuttl-tests/horizon-8bb8556c5-r5b4t" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.331712 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7b85f3e6-54eb-4b5a-8f44-6614366478c2-scripts\") pod \"horizon-6675bd755-2tpjz\" (UID: \"7b85f3e6-54eb-4b5a-8f44-6614366478c2\") " pod="horizon-kuttl-tests/horizon-6675bd755-2tpjz" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.332178 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4-scripts\") pod \"horizon-8bb8556c5-r5b4t\" (UID: \"d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4\") " pod="horizon-kuttl-tests/horizon-8bb8556c5-r5b4t" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.332376 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ngpx8\" (UniqueName: \"kubernetes.io/projected/7b85f3e6-54eb-4b5a-8f44-6614366478c2-kube-api-access-ngpx8\") pod \"horizon-6675bd755-2tpjz\" (UID: \"7b85f3e6-54eb-4b5a-8f44-6614366478c2\") " pod="horizon-kuttl-tests/horizon-6675bd755-2tpjz" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.332589 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4-config-data\") pod \"horizon-8bb8556c5-r5b4t\" (UID: \"d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4\") " pod="horizon-kuttl-tests/horizon-8bb8556c5-r5b4t" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.332854 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ls5rn\" (UniqueName: \"kubernetes.io/projected/d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4-kube-api-access-ls5rn\") pod \"horizon-8bb8556c5-r5b4t\" (UID: \"d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4\") " pod="horizon-kuttl-tests/horizon-8bb8556c5-r5b4t" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.333030 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4-scripts\") pod \"horizon-8bb8556c5-r5b4t\" (UID: \"d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4\") " pod="horizon-kuttl-tests/horizon-8bb8556c5-r5b4t" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.332656 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7b85f3e6-54eb-4b5a-8f44-6614366478c2-scripts\") pod \"horizon-6675bd755-2tpjz\" (UID: \"7b85f3e6-54eb-4b5a-8f44-6614366478c2\") " pod="horizon-kuttl-tests/horizon-6675bd755-2tpjz" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.333458 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7b85f3e6-54eb-4b5a-8f44-6614366478c2-horizon-secret-key\") pod \"horizon-6675bd755-2tpjz\" (UID: \"7b85f3e6-54eb-4b5a-8f44-6614366478c2\") " pod="horizon-kuttl-tests/horizon-6675bd755-2tpjz" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.333547 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4-horizon-secret-key\") pod \"horizon-8bb8556c5-r5b4t\" (UID: \"d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4\") " pod="horizon-kuttl-tests/horizon-8bb8556c5-r5b4t" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.333587 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b85f3e6-54eb-4b5a-8f44-6614366478c2-logs\") pod \"horizon-6675bd755-2tpjz\" (UID: \"7b85f3e6-54eb-4b5a-8f44-6614366478c2\") " pod="horizon-kuttl-tests/horizon-6675bd755-2tpjz" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.333641 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7b85f3e6-54eb-4b5a-8f44-6614366478c2-config-data\") pod \"horizon-6675bd755-2tpjz\" (UID: \"7b85f3e6-54eb-4b5a-8f44-6614366478c2\") " pod="horizon-kuttl-tests/horizon-6675bd755-2tpjz" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.333701 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4-logs\") pod \"horizon-8bb8556c5-r5b4t\" (UID: \"d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4\") " pod="horizon-kuttl-tests/horizon-8bb8556c5-r5b4t" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.333866 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4-config-data\") pod \"horizon-8bb8556c5-r5b4t\" (UID: \"d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4\") " pod="horizon-kuttl-tests/horizon-8bb8556c5-r5b4t" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.334356 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b85f3e6-54eb-4b5a-8f44-6614366478c2-logs\") pod \"horizon-6675bd755-2tpjz\" (UID: \"7b85f3e6-54eb-4b5a-8f44-6614366478c2\") " pod="horizon-kuttl-tests/horizon-6675bd755-2tpjz" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.334542 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4-logs\") pod \"horizon-8bb8556c5-r5b4t\" (UID: \"d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4\") " pod="horizon-kuttl-tests/horizon-8bb8556c5-r5b4t" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.334720 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7b85f3e6-54eb-4b5a-8f44-6614366478c2-config-data\") pod \"horizon-6675bd755-2tpjz\" (UID: \"7b85f3e6-54eb-4b5a-8f44-6614366478c2\") " pod="horizon-kuttl-tests/horizon-6675bd755-2tpjz" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.340334 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7b85f3e6-54eb-4b5a-8f44-6614366478c2-horizon-secret-key\") pod \"horizon-6675bd755-2tpjz\" (UID: 
\"7b85f3e6-54eb-4b5a-8f44-6614366478c2\") " pod="horizon-kuttl-tests/horizon-6675bd755-2tpjz" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.343085 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4-horizon-secret-key\") pod \"horizon-8bb8556c5-r5b4t\" (UID: \"d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4\") " pod="horizon-kuttl-tests/horizon-8bb8556c5-r5b4t" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.350324 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ngpx8\" (UniqueName: \"kubernetes.io/projected/7b85f3e6-54eb-4b5a-8f44-6614366478c2-kube-api-access-ngpx8\") pod \"horizon-6675bd755-2tpjz\" (UID: \"7b85f3e6-54eb-4b5a-8f44-6614366478c2\") " pod="horizon-kuttl-tests/horizon-6675bd755-2tpjz" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.352453 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ls5rn\" (UniqueName: \"kubernetes.io/projected/d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4-kube-api-access-ls5rn\") pod \"horizon-8bb8556c5-r5b4t\" (UID: \"d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4\") " pod="horizon-kuttl-tests/horizon-8bb8556c5-r5b4t" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.420355 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/horizon-6675bd755-2tpjz" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.497566 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/horizon-8bb8556c5-r5b4t" Dec 06 15:51:34 crc kubenswrapper[5003]: I1206 15:51:34.951010 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/horizon-6675bd755-2tpjz"] Dec 06 15:51:34 crc kubenswrapper[5003]: W1206 15:51:34.957814 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7b85f3e6_54eb_4b5a_8f44_6614366478c2.slice/crio-24727e5dd571c7e036d4f31a98c20f3f9345a6b913bc7c9a290c5b1d4d401e48 WatchSource:0}: Error finding container 24727e5dd571c7e036d4f31a98c20f3f9345a6b913bc7c9a290c5b1d4d401e48: Status 404 returned error can't find the container with id 24727e5dd571c7e036d4f31a98c20f3f9345a6b913bc7c9a290c5b1d4d401e48 Dec 06 15:51:35 crc kubenswrapper[5003]: I1206 15:51:35.037130 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/horizon-8bb8556c5-r5b4t"] Dec 06 15:51:35 crc kubenswrapper[5003]: W1206 15:51:35.042706 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd505c9e5_a4c0_4b9b_8725_3e7ca9dc82d4.slice/crio-30c7e7033a50894667f352dcf7bcb7a781ce980d5f7cba23b9dc03e1c8fba006 WatchSource:0}: Error finding container 30c7e7033a50894667f352dcf7bcb7a781ce980d5f7cba23b9dc03e1c8fba006: Status 404 returned error can't find the container with id 30c7e7033a50894667f352dcf7bcb7a781ce980d5f7cba23b9dc03e1c8fba006 Dec 06 15:51:35 crc kubenswrapper[5003]: I1206 15:51:35.424106 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-8bb8556c5-r5b4t" event={"ID":"d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4","Type":"ContainerStarted","Data":"30c7e7033a50894667f352dcf7bcb7a781ce980d5f7cba23b9dc03e1c8fba006"} Dec 06 15:51:35 crc kubenswrapper[5003]: I1206 15:51:35.425332 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="horizon-kuttl-tests/horizon-6675bd755-2tpjz" event={"ID":"7b85f3e6-54eb-4b5a-8f44-6614366478c2","Type":"ContainerStarted","Data":"24727e5dd571c7e036d4f31a98c20f3f9345a6b913bc7c9a290c5b1d4d401e48"} Dec 06 15:51:45 crc kubenswrapper[5003]: I1206 15:51:45.534729 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-8bb8556c5-r5b4t" event={"ID":"d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4","Type":"ContainerStarted","Data":"e468e6893d1874e63d8d54ae8a7def3d20e08fcab2895d185fd78404468510a0"} Dec 06 15:51:45 crc kubenswrapper[5003]: I1206 15:51:45.536073 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-6675bd755-2tpjz" event={"ID":"7b85f3e6-54eb-4b5a-8f44-6614366478c2","Type":"ContainerStarted","Data":"4d627c747e76ad4f32ac8caaab128a79f30f78c4a3de60f9f5e7bd950ba47cc0"} Dec 06 15:51:46 crc kubenswrapper[5003]: I1206 15:51:46.610738 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-6675bd755-2tpjz" event={"ID":"7b85f3e6-54eb-4b5a-8f44-6614366478c2","Type":"ContainerStarted","Data":"8449acd0a696ee69116edb9a8a5c0faa762bd8cbf814713242cac7adbab2008e"} Dec 06 15:51:46 crc kubenswrapper[5003]: I1206 15:51:46.614374 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-8bb8556c5-r5b4t" event={"ID":"d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4","Type":"ContainerStarted","Data":"e7ab6e18d000fecd82f5425cabab6420e08be9fe5fb7152b7ddbf297d280d531"} Dec 06 15:51:46 crc kubenswrapper[5003]: I1206 15:51:46.633408 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="horizon-kuttl-tests/horizon-6675bd755-2tpjz" podStartSLOduration=2.534489637 podStartE2EDuration="12.633387168s" podCreationTimestamp="2025-12-06 15:51:34 +0000 UTC" firstStartedPulling="2025-12-06 15:51:34.959810083 +0000 UTC m=+1173.493164464" lastFinishedPulling="2025-12-06 15:51:45.058707614 +0000 UTC m=+1183.592061995" observedRunningTime="2025-12-06 15:51:46.627730903 +0000 UTC m=+1185.161085294" watchObservedRunningTime="2025-12-06 15:51:46.633387168 +0000 UTC m=+1185.166741549" Dec 06 15:51:46 crc kubenswrapper[5003]: I1206 15:51:46.645326 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="horizon-kuttl-tests/horizon-8bb8556c5-r5b4t" podStartSLOduration=2.540268716 podStartE2EDuration="12.645308035s" podCreationTimestamp="2025-12-06 15:51:34 +0000 UTC" firstStartedPulling="2025-12-06 15:51:35.044824394 +0000 UTC m=+1173.578178775" lastFinishedPulling="2025-12-06 15:51:45.149863713 +0000 UTC m=+1183.683218094" observedRunningTime="2025-12-06 15:51:46.642843977 +0000 UTC m=+1185.176198418" watchObservedRunningTime="2025-12-06 15:51:46.645308035 +0000 UTC m=+1185.178662416" Dec 06 15:51:48 crc kubenswrapper[5003]: I1206 15:51:48.076873 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="horizon-kuttl-tests/keystone-bd6cbdc78-tjc97" Dec 06 15:51:54 crc kubenswrapper[5003]: I1206 15:51:54.421381 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="horizon-kuttl-tests/horizon-6675bd755-2tpjz" Dec 06 15:51:54 crc kubenswrapper[5003]: I1206 15:51:54.422173 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="horizon-kuttl-tests/horizon-6675bd755-2tpjz" Dec 06 15:51:54 crc kubenswrapper[5003]: I1206 15:51:54.498502 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="horizon-kuttl-tests/horizon-8bb8556c5-r5b4t" Dec 06 15:51:54 crc kubenswrapper[5003]: I1206 15:51:54.498565 5003 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="horizon-kuttl-tests/horizon-8bb8556c5-r5b4t" Dec 06 15:52:06 crc kubenswrapper[5003]: I1206 15:52:06.185228 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="horizon-kuttl-tests/horizon-8bb8556c5-r5b4t" Dec 06 15:52:06 crc kubenswrapper[5003]: I1206 15:52:06.208019 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="horizon-kuttl-tests/horizon-6675bd755-2tpjz" Dec 06 15:52:07 crc kubenswrapper[5003]: I1206 15:52:07.820485 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="horizon-kuttl-tests/horizon-8bb8556c5-r5b4t" Dec 06 15:52:07 crc kubenswrapper[5003]: I1206 15:52:07.875264 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/horizon-6675bd755-2tpjz"] Dec 06 15:52:07 crc kubenswrapper[5003]: I1206 15:52:07.875584 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="horizon-kuttl-tests/horizon-6675bd755-2tpjz" podUID="7b85f3e6-54eb-4b5a-8f44-6614366478c2" containerName="horizon-log" containerID="cri-o://4d627c747e76ad4f32ac8caaab128a79f30f78c4a3de60f9f5e7bd950ba47cc0" gracePeriod=30 Dec 06 15:52:07 crc kubenswrapper[5003]: I1206 15:52:07.876571 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="horizon-kuttl-tests/horizon-6675bd755-2tpjz" podUID="7b85f3e6-54eb-4b5a-8f44-6614366478c2" containerName="horizon" containerID="cri-o://8449acd0a696ee69116edb9a8a5c0faa762bd8cbf814713242cac7adbab2008e" gracePeriod=30 Dec 06 15:52:07 crc kubenswrapper[5003]: I1206 15:52:07.882894 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="horizon-kuttl-tests/horizon-6675bd755-2tpjz" podUID="7b85f3e6-54eb-4b5a-8f44-6614366478c2" containerName="horizon" probeResult="failure" output="Get \"http://10.217.0.85:8080/dashboard/auth/login/?next=/dashboard/\": EOF" Dec 06 15:52:10 crc kubenswrapper[5003]: I1206 15:52:10.891102 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/horizon-845cfdcdb-2kc8g"] Dec 06 15:52:10 crc kubenswrapper[5003]: I1206 15:52:10.892818 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/horizon-845cfdcdb-2kc8g" Dec 06 15:52:10 crc kubenswrapper[5003]: I1206 15:52:10.894709 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"horizon-kuttl-tests"/"horizon-policy" Dec 06 15:52:10 crc kubenswrapper[5003]: I1206 15:52:10.911893 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/horizon-845cfdcdb-2kc8g"] Dec 06 15:52:10 crc kubenswrapper[5003]: I1206 15:52:10.967797 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/horizon-8bb8556c5-r5b4t"] Dec 06 15:52:10 crc kubenswrapper[5003]: I1206 15:52:10.968404 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="horizon-kuttl-tests/horizon-8bb8556c5-r5b4t" podUID="d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4" containerName="horizon-log" containerID="cri-o://e468e6893d1874e63d8d54ae8a7def3d20e08fcab2895d185fd78404468510a0" gracePeriod=30 Dec 06 15:52:10 crc kubenswrapper[5003]: I1206 15:52:10.968566 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="horizon-kuttl-tests/horizon-8bb8556c5-r5b4t" podUID="d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4" containerName="horizon" containerID="cri-o://e7ab6e18d000fecd82f5425cabab6420e08be9fe5fb7152b7ddbf297d280d531" gracePeriod=30 Dec 06 15:52:10 crc kubenswrapper[5003]: I1206 15:52:10.979286 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/horizon-845cfdcdb-2kc8g"] Dec 06 15:52:10 crc kubenswrapper[5003]: E1206 15:52:10.980168 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config-data horizon-secret-key kube-api-access-85x78 logs policy scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="horizon-kuttl-tests/horizon-845cfdcdb-2kc8g" podUID="99fc56f4-d509-4545-8044-3a5efe6fceb7" Dec 06 15:52:11 crc kubenswrapper[5003]: I1206 15:52:11.005450 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="horizon-kuttl-tests/horizon-6675bd755-2tpjz" podUID="7b85f3e6-54eb-4b5a-8f44-6614366478c2" containerName="horizon" probeResult="failure" output="Get \"http://10.217.0.85:8080/dashboard/auth/login/?next=/dashboard/\": read tcp 10.217.0.2:50964->10.217.0.85:8080: read: connection reset by peer" Dec 06 15:52:11 crc kubenswrapper[5003]: I1206 15:52:11.033609 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/99fc56f4-d509-4545-8044-3a5efe6fceb7-logs\") pod \"horizon-845cfdcdb-2kc8g\" (UID: \"99fc56f4-d509-4545-8044-3a5efe6fceb7\") " pod="horizon-kuttl-tests/horizon-845cfdcdb-2kc8g" Dec 06 15:52:11 crc kubenswrapper[5003]: I1206 15:52:11.033661 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"policy\" (UniqueName: \"kubernetes.io/configmap/99fc56f4-d509-4545-8044-3a5efe6fceb7-policy\") pod \"horizon-845cfdcdb-2kc8g\" (UID: \"99fc56f4-d509-4545-8044-3a5efe6fceb7\") " pod="horizon-kuttl-tests/horizon-845cfdcdb-2kc8g" Dec 06 15:52:11 crc kubenswrapper[5003]: I1206 15:52:11.033734 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/99fc56f4-d509-4545-8044-3a5efe6fceb7-scripts\") pod \"horizon-845cfdcdb-2kc8g\" (UID: \"99fc56f4-d509-4545-8044-3a5efe6fceb7\") " pod="horizon-kuttl-tests/horizon-845cfdcdb-2kc8g" Dec 06 15:52:11 crc kubenswrapper[5003]: I1206 15:52:11.033773 5003 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/99fc56f4-d509-4545-8044-3a5efe6fceb7-config-data\") pod \"horizon-845cfdcdb-2kc8g\" (UID: \"99fc56f4-d509-4545-8044-3a5efe6fceb7\") " pod="horizon-kuttl-tests/horizon-845cfdcdb-2kc8g" Dec 06 15:52:11 crc kubenswrapper[5003]: I1206 15:52:11.033816 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/99fc56f4-d509-4545-8044-3a5efe6fceb7-horizon-secret-key\") pod \"horizon-845cfdcdb-2kc8g\" (UID: \"99fc56f4-d509-4545-8044-3a5efe6fceb7\") " pod="horizon-kuttl-tests/horizon-845cfdcdb-2kc8g" Dec 06 15:52:11 crc kubenswrapper[5003]: I1206 15:52:11.033892 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-85x78\" (UniqueName: \"kubernetes.io/projected/99fc56f4-d509-4545-8044-3a5efe6fceb7-kube-api-access-85x78\") pod \"horizon-845cfdcdb-2kc8g\" (UID: \"99fc56f4-d509-4545-8044-3a5efe6fceb7\") " pod="horizon-kuttl-tests/horizon-845cfdcdb-2kc8g" Dec 06 15:52:11 crc kubenswrapper[5003]: I1206 15:52:11.135071 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/99fc56f4-d509-4545-8044-3a5efe6fceb7-scripts\") pod \"horizon-845cfdcdb-2kc8g\" (UID: \"99fc56f4-d509-4545-8044-3a5efe6fceb7\") " pod="horizon-kuttl-tests/horizon-845cfdcdb-2kc8g" Dec 06 15:52:11 crc kubenswrapper[5003]: I1206 15:52:11.135138 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/99fc56f4-d509-4545-8044-3a5efe6fceb7-config-data\") pod \"horizon-845cfdcdb-2kc8g\" (UID: \"99fc56f4-d509-4545-8044-3a5efe6fceb7\") " pod="horizon-kuttl-tests/horizon-845cfdcdb-2kc8g" Dec 06 15:52:11 crc kubenswrapper[5003]: I1206 15:52:11.135186 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/99fc56f4-d509-4545-8044-3a5efe6fceb7-horizon-secret-key\") pod \"horizon-845cfdcdb-2kc8g\" (UID: \"99fc56f4-d509-4545-8044-3a5efe6fceb7\") " pod="horizon-kuttl-tests/horizon-845cfdcdb-2kc8g" Dec 06 15:52:11 crc kubenswrapper[5003]: E1206 15:52:11.135199 5003 configmap.go:193] Couldn't get configMap horizon-kuttl-tests/horizon-scripts: configmap "horizon-scripts" not found Dec 06 15:52:11 crc kubenswrapper[5003]: I1206 15:52:11.135240 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85x78\" (UniqueName: \"kubernetes.io/projected/99fc56f4-d509-4545-8044-3a5efe6fceb7-kube-api-access-85x78\") pod \"horizon-845cfdcdb-2kc8g\" (UID: \"99fc56f4-d509-4545-8044-3a5efe6fceb7\") " pod="horizon-kuttl-tests/horizon-845cfdcdb-2kc8g" Dec 06 15:52:11 crc kubenswrapper[5003]: E1206 15:52:11.135251 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/99fc56f4-d509-4545-8044-3a5efe6fceb7-scripts podName:99fc56f4-d509-4545-8044-3a5efe6fceb7 nodeName:}" failed. No retries permitted until 2025-12-06 15:52:11.635233815 +0000 UTC m=+1210.168588196 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/configmap/99fc56f4-d509-4545-8044-3a5efe6fceb7-scripts") pod "horizon-845cfdcdb-2kc8g" (UID: "99fc56f4-d509-4545-8044-3a5efe6fceb7") : configmap "horizon-scripts" not found Dec 06 15:52:11 crc kubenswrapper[5003]: I1206 15:52:11.135268 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/99fc56f4-d509-4545-8044-3a5efe6fceb7-logs\") pod \"horizon-845cfdcdb-2kc8g\" (UID: \"99fc56f4-d509-4545-8044-3a5efe6fceb7\") " pod="horizon-kuttl-tests/horizon-845cfdcdb-2kc8g" Dec 06 15:52:11 crc kubenswrapper[5003]: E1206 15:52:11.135278 5003 configmap.go:193] Couldn't get configMap horizon-kuttl-tests/horizon-config-data: configmap "horizon-config-data" not found Dec 06 15:52:11 crc kubenswrapper[5003]: I1206 15:52:11.135309 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"policy\" (UniqueName: \"kubernetes.io/configmap/99fc56f4-d509-4545-8044-3a5efe6fceb7-policy\") pod \"horizon-845cfdcdb-2kc8g\" (UID: \"99fc56f4-d509-4545-8044-3a5efe6fceb7\") " pod="horizon-kuttl-tests/horizon-845cfdcdb-2kc8g" Dec 06 15:52:11 crc kubenswrapper[5003]: E1206 15:52:11.135322 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/99fc56f4-d509-4545-8044-3a5efe6fceb7-config-data podName:99fc56f4-d509-4545-8044-3a5efe6fceb7 nodeName:}" failed. No retries permitted until 2025-12-06 15:52:11.635307157 +0000 UTC m=+1210.168661538 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/99fc56f4-d509-4545-8044-3a5efe6fceb7-config-data") pod "horizon-845cfdcdb-2kc8g" (UID: "99fc56f4-d509-4545-8044-3a5efe6fceb7") : configmap "horizon-config-data" not found Dec 06 15:52:11 crc kubenswrapper[5003]: E1206 15:52:11.135380 5003 secret.go:188] Couldn't get secret horizon-kuttl-tests/horizon: secret "horizon" not found Dec 06 15:52:11 crc kubenswrapper[5003]: E1206 15:52:11.135450 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/99fc56f4-d509-4545-8044-3a5efe6fceb7-horizon-secret-key podName:99fc56f4-d509-4545-8044-3a5efe6fceb7 nodeName:}" failed. No retries permitted until 2025-12-06 15:52:11.63543269 +0000 UTC m=+1210.168787071 (durationBeforeRetry 500ms). 
Dec 06 15:52:11 crc kubenswrapper[5003]: I1206 15:52:11.135787 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/99fc56f4-d509-4545-8044-3a5efe6fceb7-logs\") pod \"horizon-845cfdcdb-2kc8g\" (UID: \"99fc56f4-d509-4545-8044-3a5efe6fceb7\") " pod="horizon-kuttl-tests/horizon-845cfdcdb-2kc8g"
Dec 06 15:52:11 crc kubenswrapper[5003]: I1206 15:52:11.136079 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"policy\" (UniqueName: \"kubernetes.io/configmap/99fc56f4-d509-4545-8044-3a5efe6fceb7-policy\") pod \"horizon-845cfdcdb-2kc8g\" (UID: \"99fc56f4-d509-4545-8044-3a5efe6fceb7\") " pod="horizon-kuttl-tests/horizon-845cfdcdb-2kc8g"
Dec 06 15:52:11 crc kubenswrapper[5003]: E1206 15:52:11.140442 5003 projected.go:194] Error preparing data for projected volume kube-api-access-85x78 for pod horizon-kuttl-tests/horizon-845cfdcdb-2kc8g: failed to fetch token: serviceaccounts "horizon-horizon" not found
Dec 06 15:52:11 crc kubenswrapper[5003]: E1206 15:52:11.140509 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/99fc56f4-d509-4545-8044-3a5efe6fceb7-kube-api-access-85x78 podName:99fc56f4-d509-4545-8044-3a5efe6fceb7 nodeName:}" failed. No retries permitted until 2025-12-06 15:52:11.640481259 +0000 UTC m=+1210.173835630 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-85x78" (UniqueName: "kubernetes.io/projected/99fc56f4-d509-4545-8044-3a5efe6fceb7-kube-api-access-85x78") pod "horizon-845cfdcdb-2kc8g" (UID: "99fc56f4-d509-4545-8044-3a5efe6fceb7") : failed to fetch token: serviceaccounts "horizon-horizon" not found
Dec 06 15:52:11 crc kubenswrapper[5003]: I1206 15:52:11.643268 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/99fc56f4-d509-4545-8044-3a5efe6fceb7-scripts\") pod \"horizon-845cfdcdb-2kc8g\" (UID: \"99fc56f4-d509-4545-8044-3a5efe6fceb7\") " pod="horizon-kuttl-tests/horizon-845cfdcdb-2kc8g"
Dec 06 15:52:11 crc kubenswrapper[5003]: I1206 15:52:11.643355 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/99fc56f4-d509-4545-8044-3a5efe6fceb7-config-data\") pod \"horizon-845cfdcdb-2kc8g\" (UID: \"99fc56f4-d509-4545-8044-3a5efe6fceb7\") " pod="horizon-kuttl-tests/horizon-845cfdcdb-2kc8g"
Dec 06 15:52:11 crc kubenswrapper[5003]: I1206 15:52:11.643407 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/99fc56f4-d509-4545-8044-3a5efe6fceb7-horizon-secret-key\") pod \"horizon-845cfdcdb-2kc8g\" (UID: \"99fc56f4-d509-4545-8044-3a5efe6fceb7\") " pod="horizon-kuttl-tests/horizon-845cfdcdb-2kc8g"
Dec 06 15:52:11 crc kubenswrapper[5003]: E1206 15:52:11.643414 5003 configmap.go:193] Couldn't get configMap horizon-kuttl-tests/horizon-scripts: configmap "horizon-scripts" not found
Dec 06 15:52:11 crc kubenswrapper[5003]: I1206 15:52:11.643463 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85x78\" (UniqueName: \"kubernetes.io/projected/99fc56f4-d509-4545-8044-3a5efe6fceb7-kube-api-access-85x78\") pod \"horizon-845cfdcdb-2kc8g\" (UID: \"99fc56f4-d509-4545-8044-3a5efe6fceb7\") " pod="horizon-kuttl-tests/horizon-845cfdcdb-2kc8g"
Dec 06 15:52:11 crc kubenswrapper[5003]: E1206 15:52:11.643508 5003 configmap.go:193] Couldn't get configMap horizon-kuttl-tests/horizon-config-data: configmap "horizon-config-data" not found
Dec 06 15:52:11 crc kubenswrapper[5003]: E1206 15:52:11.643526 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/99fc56f4-d509-4545-8044-3a5efe6fceb7-scripts podName:99fc56f4-d509-4545-8044-3a5efe6fceb7 nodeName:}" failed. No retries permitted until 2025-12-06 15:52:12.643482173 +0000 UTC m=+1211.176836614 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/configmap/99fc56f4-d509-4545-8044-3a5efe6fceb7-scripts") pod "horizon-845cfdcdb-2kc8g" (UID: "99fc56f4-d509-4545-8044-3a5efe6fceb7") : configmap "horizon-scripts" not found
Dec 06 15:52:11 crc kubenswrapper[5003]: E1206 15:52:11.643563 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/99fc56f4-d509-4545-8044-3a5efe6fceb7-config-data podName:99fc56f4-d509-4545-8044-3a5efe6fceb7 nodeName:}" failed. No retries permitted until 2025-12-06 15:52:12.643548195 +0000 UTC m=+1211.176902576 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/99fc56f4-d509-4545-8044-3a5efe6fceb7-config-data") pod "horizon-845cfdcdb-2kc8g" (UID: "99fc56f4-d509-4545-8044-3a5efe6fceb7") : configmap "horizon-config-data" not found
Dec 06 15:52:11 crc kubenswrapper[5003]: E1206 15:52:11.643612 5003 secret.go:188] Couldn't get secret horizon-kuttl-tests/horizon: secret "horizon" not found
Dec 06 15:52:11 crc kubenswrapper[5003]: E1206 15:52:11.643634 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/99fc56f4-d509-4545-8044-3a5efe6fceb7-horizon-secret-key podName:99fc56f4-d509-4545-8044-3a5efe6fceb7 nodeName:}" failed. No retries permitted until 2025-12-06 15:52:12.643627677 +0000 UTC m=+1211.176982048 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "horizon-secret-key" (UniqueName: "kubernetes.io/secret/99fc56f4-d509-4545-8044-3a5efe6fceb7-horizon-secret-key") pod "horizon-845cfdcdb-2kc8g" (UID: "99fc56f4-d509-4545-8044-3a5efe6fceb7") : secret "horizon" not found
Dec 06 15:52:11 crc kubenswrapper[5003]: E1206 15:52:11.647635 5003 projected.go:194] Error preparing data for projected volume kube-api-access-85x78 for pod horizon-kuttl-tests/horizon-845cfdcdb-2kc8g: failed to fetch token: serviceaccounts "horizon-horizon" not found
Dec 06 15:52:11 crc kubenswrapper[5003]: E1206 15:52:11.647700 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/99fc56f4-d509-4545-8044-3a5efe6fceb7-kube-api-access-85x78 podName:99fc56f4-d509-4545-8044-3a5efe6fceb7 nodeName:}" failed. No retries permitted until 2025-12-06 15:52:12.647684629 +0000 UTC m=+1211.181039090 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-85x78" (UniqueName: "kubernetes.io/projected/99fc56f4-d509-4545-8044-3a5efe6fceb7-kube-api-access-85x78") pod "horizon-845cfdcdb-2kc8g" (UID: "99fc56f4-d509-4545-8044-3a5efe6fceb7") : failed to fetch token: serviceaccounts "horizon-horizon" not found
Dec 06 15:52:11 crc kubenswrapper[5003]: I1206 15:52:11.805994 5003 generic.go:334] "Generic (PLEG): container finished" podID="7b85f3e6-54eb-4b5a-8f44-6614366478c2" containerID="8449acd0a696ee69116edb9a8a5c0faa762bd8cbf814713242cac7adbab2008e" exitCode=0
Dec 06 15:52:11 crc kubenswrapper[5003]: I1206 15:52:11.806069 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/horizon-845cfdcdb-2kc8g"
Dec 06 15:52:11 crc kubenswrapper[5003]: I1206 15:52:11.806061 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-6675bd755-2tpjz" event={"ID":"7b85f3e6-54eb-4b5a-8f44-6614366478c2","Type":"ContainerDied","Data":"8449acd0a696ee69116edb9a8a5c0faa762bd8cbf814713242cac7adbab2008e"}
Dec 06 15:52:11 crc kubenswrapper[5003]: I1206 15:52:11.814381 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/horizon-845cfdcdb-2kc8g"
Dec 06 15:52:11 crc kubenswrapper[5003]: I1206 15:52:11.948460 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/99fc56f4-d509-4545-8044-3a5efe6fceb7-logs\") pod \"99fc56f4-d509-4545-8044-3a5efe6fceb7\" (UID: \"99fc56f4-d509-4545-8044-3a5efe6fceb7\") "
Dec 06 15:52:11 crc kubenswrapper[5003]: I1206 15:52:11.948581 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"policy\" (UniqueName: \"kubernetes.io/configmap/99fc56f4-d509-4545-8044-3a5efe6fceb7-policy\") pod \"99fc56f4-d509-4545-8044-3a5efe6fceb7\" (UID: \"99fc56f4-d509-4545-8044-3a5efe6fceb7\") "
Dec 06 15:52:11 crc kubenswrapper[5003]: I1206 15:52:11.948750 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/99fc56f4-d509-4545-8044-3a5efe6fceb7-logs" (OuterVolumeSpecName: "logs") pod "99fc56f4-d509-4545-8044-3a5efe6fceb7" (UID: "99fc56f4-d509-4545-8044-3a5efe6fceb7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 06 15:52:11 crc kubenswrapper[5003]: I1206 15:52:11.948956 5003 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/99fc56f4-d509-4545-8044-3a5efe6fceb7-logs\") on node \"crc\" DevicePath \"\""
Dec 06 15:52:11 crc kubenswrapper[5003]: I1206 15:52:11.949230 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/99fc56f4-d509-4545-8044-3a5efe6fceb7-policy" (OuterVolumeSpecName: "policy") pod "99fc56f4-d509-4545-8044-3a5efe6fceb7" (UID: "99fc56f4-d509-4545-8044-3a5efe6fceb7"). InnerVolumeSpecName "policy". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 06 15:52:12 crc kubenswrapper[5003]: I1206 15:52:12.049772 5003 reconciler_common.go:293] "Volume detached for volume \"policy\" (UniqueName: \"kubernetes.io/configmap/99fc56f4-d509-4545-8044-3a5efe6fceb7-policy\") on node \"crc\" DevicePath \"\""
Dec 06 15:52:12 crc kubenswrapper[5003]: I1206 15:52:12.659418 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/99fc56f4-d509-4545-8044-3a5efe6fceb7-config-data\") pod \"horizon-845cfdcdb-2kc8g\" (UID: \"99fc56f4-d509-4545-8044-3a5efe6fceb7\") " pod="horizon-kuttl-tests/horizon-845cfdcdb-2kc8g"
Dec 06 15:52:12 crc kubenswrapper[5003]: I1206 15:52:12.659529 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/99fc56f4-d509-4545-8044-3a5efe6fceb7-horizon-secret-key\") pod \"horizon-845cfdcdb-2kc8g\" (UID: \"99fc56f4-d509-4545-8044-3a5efe6fceb7\") " pod="horizon-kuttl-tests/horizon-845cfdcdb-2kc8g"
Dec 06 15:52:12 crc kubenswrapper[5003]: I1206 15:52:12.659592 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85x78\" (UniqueName: \"kubernetes.io/projected/99fc56f4-d509-4545-8044-3a5efe6fceb7-kube-api-access-85x78\") pod \"horizon-845cfdcdb-2kc8g\" (UID: \"99fc56f4-d509-4545-8044-3a5efe6fceb7\") " pod="horizon-kuttl-tests/horizon-845cfdcdb-2kc8g"
Dec 06 15:52:12 crc kubenswrapper[5003]: I1206 15:52:12.659645 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/99fc56f4-d509-4545-8044-3a5efe6fceb7-scripts\") pod \"horizon-845cfdcdb-2kc8g\" (UID: \"99fc56f4-d509-4545-8044-3a5efe6fceb7\") " pod="horizon-kuttl-tests/horizon-845cfdcdb-2kc8g"
Dec 06 15:52:12 crc kubenswrapper[5003]: E1206 15:52:12.659666 5003 configmap.go:193] Couldn't get configMap horizon-kuttl-tests/horizon-config-data: configmap "horizon-config-data" not found
Dec 06 15:52:12 crc kubenswrapper[5003]: E1206 15:52:12.659753 5003 configmap.go:193] Couldn't get configMap horizon-kuttl-tests/horizon-scripts: configmap "horizon-scripts" not found
Dec 06 15:52:12 crc kubenswrapper[5003]: E1206 15:52:12.659765 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/99fc56f4-d509-4545-8044-3a5efe6fceb7-config-data podName:99fc56f4-d509-4545-8044-3a5efe6fceb7 nodeName:}" failed. No retries permitted until 2025-12-06 15:52:14.659744804 +0000 UTC m=+1213.193099185 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/99fc56f4-d509-4545-8044-3a5efe6fceb7-config-data") pod "horizon-845cfdcdb-2kc8g" (UID: "99fc56f4-d509-4545-8044-3a5efe6fceb7") : configmap "horizon-config-data" not found
Dec 06 15:52:12 crc kubenswrapper[5003]: E1206 15:52:12.659807 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/99fc56f4-d509-4545-8044-3a5efe6fceb7-scripts podName:99fc56f4-d509-4545-8044-3a5efe6fceb7 nodeName:}" failed. No retries permitted until 2025-12-06 15:52:14.659790285 +0000 UTC m=+1213.193144746 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/configmap/99fc56f4-d509-4545-8044-3a5efe6fceb7-scripts") pod "horizon-845cfdcdb-2kc8g" (UID: "99fc56f4-d509-4545-8044-3a5efe6fceb7") : configmap "horizon-scripts" not found
Dec 06 15:52:12 crc kubenswrapper[5003]: E1206 15:52:12.659859 5003 secret.go:188] Couldn't get secret horizon-kuttl-tests/horizon: secret "horizon" not found
Dec 06 15:52:12 crc kubenswrapper[5003]: E1206 15:52:12.659969 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/99fc56f4-d509-4545-8044-3a5efe6fceb7-horizon-secret-key podName:99fc56f4-d509-4545-8044-3a5efe6fceb7 nodeName:}" failed. No retries permitted until 2025-12-06 15:52:14.659935989 +0000 UTC m=+1213.193290420 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "horizon-secret-key" (UniqueName: "kubernetes.io/secret/99fc56f4-d509-4545-8044-3a5efe6fceb7-horizon-secret-key") pod "horizon-845cfdcdb-2kc8g" (UID: "99fc56f4-d509-4545-8044-3a5efe6fceb7") : secret "horizon" not found
Dec 06 15:52:12 crc kubenswrapper[5003]: E1206 15:52:12.664895 5003 projected.go:194] Error preparing data for projected volume kube-api-access-85x78 for pod horizon-kuttl-tests/horizon-845cfdcdb-2kc8g: failed to fetch token: serviceaccounts "horizon-horizon" not found
Dec 06 15:52:12 crc kubenswrapper[5003]: E1206 15:52:12.665060 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/99fc56f4-d509-4545-8044-3a5efe6fceb7-kube-api-access-85x78 podName:99fc56f4-d509-4545-8044-3a5efe6fceb7 nodeName:}" failed. No retries permitted until 2025-12-06 15:52:14.665022169 +0000 UTC m=+1213.198376590 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-85x78" (UniqueName: "kubernetes.io/projected/99fc56f4-d509-4545-8044-3a5efe6fceb7-kube-api-access-85x78") pod "horizon-845cfdcdb-2kc8g" (UID: "99fc56f4-d509-4545-8044-3a5efe6fceb7") : failed to fetch token: serviceaccounts "horizon-horizon" not found
Dec 06 15:52:12 crc kubenswrapper[5003]: I1206 15:52:12.814847 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/horizon-845cfdcdb-2kc8g"
Dec 06 15:52:12 crc kubenswrapper[5003]: I1206 15:52:12.867422 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/horizon-845cfdcdb-2kc8g"]
Dec 06 15:52:12 crc kubenswrapper[5003]: I1206 15:52:12.878297 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["horizon-kuttl-tests/horizon-845cfdcdb-2kc8g"]
Dec 06 15:52:12 crc kubenswrapper[5003]: I1206 15:52:12.965424 5003 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/99fc56f4-d509-4545-8044-3a5efe6fceb7-horizon-secret-key\") on node \"crc\" DevicePath \"\""
Dec 06 15:52:12 crc kubenswrapper[5003]: I1206 15:52:12.965471 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/99fc56f4-d509-4545-8044-3a5efe6fceb7-scripts\") on node \"crc\" DevicePath \"\""
Dec 06 15:52:12 crc kubenswrapper[5003]: I1206 15:52:12.965487 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-85x78\" (UniqueName: \"kubernetes.io/projected/99fc56f4-d509-4545-8044-3a5efe6fceb7-kube-api-access-85x78\") on node \"crc\" DevicePath \"\""
Dec 06 15:52:12 crc kubenswrapper[5003]: I1206 15:52:12.965518 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/99fc56f4-d509-4545-8044-3a5efe6fceb7-config-data\") on node \"crc\" DevicePath \"\""
Dec 06 15:52:13 crc kubenswrapper[5003]: I1206 15:52:13.722351 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99fc56f4-d509-4545-8044-3a5efe6fceb7" path="/var/lib/kubelet/pods/99fc56f4-d509-4545-8044-3a5efe6fceb7/volumes"
Dec 06 15:52:14 crc kubenswrapper[5003]: I1206 15:52:14.421822 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="horizon-kuttl-tests/horizon-6675bd755-2tpjz" podUID="7b85f3e6-54eb-4b5a-8f44-6614366478c2" containerName="horizon" probeResult="failure" output="Get \"http://10.217.0.85:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.85:8080: connect: connection refused"
Dec 06 15:52:14 crc kubenswrapper[5003]: I1206 15:52:14.499239 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="horizon-kuttl-tests/horizon-8bb8556c5-r5b4t" podUID="d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4" containerName="horizon" probeResult="failure" output="Get \"http://10.217.0.86:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.86:8080: connect: connection refused"
Dec 06 15:52:14 crc kubenswrapper[5003]: I1206 15:52:14.832181 5003 generic.go:334] "Generic (PLEG): container finished" podID="d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4" containerID="e7ab6e18d000fecd82f5425cabab6420e08be9fe5fb7152b7ddbf297d280d531" exitCode=0
Dec 06 15:52:14 crc kubenswrapper[5003]: I1206 15:52:14.832231 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-8bb8556c5-r5b4t" event={"ID":"d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4","Type":"ContainerDied","Data":"e7ab6e18d000fecd82f5425cabab6420e08be9fe5fb7152b7ddbf297d280d531"}
Dec 06 15:52:24 crc kubenswrapper[5003]: I1206 15:52:24.421413 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="horizon-kuttl-tests/horizon-6675bd755-2tpjz" podUID="7b85f3e6-54eb-4b5a-8f44-6614366478c2" containerName="horizon" probeResult="failure" output="Get \"http://10.217.0.85:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.85:8080: connect: connection refused"
Dec 06 15:52:24 crc kubenswrapper[5003]: I1206 15:52:24.499198 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="horizon-kuttl-tests/horizon-8bb8556c5-r5b4t" podUID="d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4" containerName="horizon" probeResult="failure" output="Get \"http://10.217.0.86:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.86:8080: connect: connection refused"
Dec 06 15:52:34 crc kubenswrapper[5003]: I1206 15:52:34.421351 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="horizon-kuttl-tests/horizon-6675bd755-2tpjz" podUID="7b85f3e6-54eb-4b5a-8f44-6614366478c2" containerName="horizon" probeResult="failure" output="Get \"http://10.217.0.85:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.85:8080: connect: connection refused"
Dec 06 15:52:34 crc kubenswrapper[5003]: I1206 15:52:34.498936 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="horizon-kuttl-tests/horizon-8bb8556c5-r5b4t" podUID="d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4" containerName="horizon" probeResult="failure" output="Get \"http://10.217.0.86:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.86:8080: connect: connection refused"
Dec 06 15:52:34 crc kubenswrapper[5003]: I1206 15:52:34.499291 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="horizon-kuttl-tests/horizon-8bb8556c5-r5b4t"
Dec 06 15:52:37 crc kubenswrapper[5003]: I1206 15:52:37.979220 5003 generic.go:334] "Generic (PLEG): container finished" podID="7b85f3e6-54eb-4b5a-8f44-6614366478c2" containerID="4d627c747e76ad4f32ac8caaab128a79f30f78c4a3de60f9f5e7bd950ba47cc0" exitCode=137
Dec 06 15:52:37 crc kubenswrapper[5003]: I1206 15:52:37.979320 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-6675bd755-2tpjz" event={"ID":"7b85f3e6-54eb-4b5a-8f44-6614366478c2","Type":"ContainerDied","Data":"4d627c747e76ad4f32ac8caaab128a79f30f78c4a3de60f9f5e7bd950ba47cc0"}
Dec 06 15:52:38 crc kubenswrapper[5003]: I1206 15:52:38.161884 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/horizon-6675bd755-2tpjz"
Dec 06 15:52:38 crc kubenswrapper[5003]: I1206 15:52:38.229254 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7b85f3e6-54eb-4b5a-8f44-6614366478c2-scripts\") pod \"7b85f3e6-54eb-4b5a-8f44-6614366478c2\" (UID: \"7b85f3e6-54eb-4b5a-8f44-6614366478c2\") "
Dec 06 15:52:38 crc kubenswrapper[5003]: I1206 15:52:38.229338 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b85f3e6-54eb-4b5a-8f44-6614366478c2-logs\") pod \"7b85f3e6-54eb-4b5a-8f44-6614366478c2\" (UID: \"7b85f3e6-54eb-4b5a-8f44-6614366478c2\") "
Dec 06 15:52:38 crc kubenswrapper[5003]: I1206 15:52:38.229410 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7b85f3e6-54eb-4b5a-8f44-6614366478c2-config-data\") pod \"7b85f3e6-54eb-4b5a-8f44-6614366478c2\" (UID: \"7b85f3e6-54eb-4b5a-8f44-6614366478c2\") "
Dec 06 15:52:38 crc kubenswrapper[5003]: I1206 15:52:38.229432 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngpx8\" (UniqueName: \"kubernetes.io/projected/7b85f3e6-54eb-4b5a-8f44-6614366478c2-kube-api-access-ngpx8\") pod \"7b85f3e6-54eb-4b5a-8f44-6614366478c2\" (UID: \"7b85f3e6-54eb-4b5a-8f44-6614366478c2\") "
Dec 06 15:52:38 crc kubenswrapper[5003]: I1206 15:52:38.229508 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7b85f3e6-54eb-4b5a-8f44-6614366478c2-horizon-secret-key\") pod \"7b85f3e6-54eb-4b5a-8f44-6614366478c2\" (UID: \"7b85f3e6-54eb-4b5a-8f44-6614366478c2\") "
Dec 06 15:52:38 crc kubenswrapper[5003]: I1206 15:52:38.230051 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b85f3e6-54eb-4b5a-8f44-6614366478c2-logs" (OuterVolumeSpecName: "logs") pod "7b85f3e6-54eb-4b5a-8f44-6614366478c2" (UID: "7b85f3e6-54eb-4b5a-8f44-6614366478c2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 06 15:52:38 crc kubenswrapper[5003]: I1206 15:52:38.235028 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b85f3e6-54eb-4b5a-8f44-6614366478c2-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "7b85f3e6-54eb-4b5a-8f44-6614366478c2" (UID: "7b85f3e6-54eb-4b5a-8f44-6614366478c2"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 06 15:52:38 crc kubenswrapper[5003]: I1206 15:52:38.235245 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b85f3e6-54eb-4b5a-8f44-6614366478c2-kube-api-access-ngpx8" (OuterVolumeSpecName: "kube-api-access-ngpx8") pod "7b85f3e6-54eb-4b5a-8f44-6614366478c2" (UID: "7b85f3e6-54eb-4b5a-8f44-6614366478c2"). InnerVolumeSpecName "kube-api-access-ngpx8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 06 15:52:38 crc kubenswrapper[5003]: I1206 15:52:38.248804 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b85f3e6-54eb-4b5a-8f44-6614366478c2-config-data" (OuterVolumeSpecName: "config-data") pod "7b85f3e6-54eb-4b5a-8f44-6614366478c2" (UID: "7b85f3e6-54eb-4b5a-8f44-6614366478c2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:52:38 crc kubenswrapper[5003]: I1206 15:52:38.249106 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b85f3e6-54eb-4b5a-8f44-6614366478c2-scripts" (OuterVolumeSpecName: "scripts") pod "7b85f3e6-54eb-4b5a-8f44-6614366478c2" (UID: "7b85f3e6-54eb-4b5a-8f44-6614366478c2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:52:38 crc kubenswrapper[5003]: I1206 15:52:38.332344 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7b85f3e6-54eb-4b5a-8f44-6614366478c2-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 15:52:38 crc kubenswrapper[5003]: I1206 15:52:38.332405 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngpx8\" (UniqueName: \"kubernetes.io/projected/7b85f3e6-54eb-4b5a-8f44-6614366478c2-kube-api-access-ngpx8\") on node \"crc\" DevicePath \"\"" Dec 06 15:52:38 crc kubenswrapper[5003]: I1206 15:52:38.332418 5003 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7b85f3e6-54eb-4b5a-8f44-6614366478c2-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 06 15:52:38 crc kubenswrapper[5003]: I1206 15:52:38.332452 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7b85f3e6-54eb-4b5a-8f44-6614366478c2-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 15:52:38 crc kubenswrapper[5003]: I1206 15:52:38.332466 5003 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b85f3e6-54eb-4b5a-8f44-6614366478c2-logs\") on node \"crc\" DevicePath \"\"" Dec 06 15:52:38 crc kubenswrapper[5003]: I1206 15:52:38.988767 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-6675bd755-2tpjz" event={"ID":"7b85f3e6-54eb-4b5a-8f44-6614366478c2","Type":"ContainerDied","Data":"24727e5dd571c7e036d4f31a98c20f3f9345a6b913bc7c9a290c5b1d4d401e48"} Dec 06 15:52:38 crc kubenswrapper[5003]: I1206 15:52:38.988830 5003 scope.go:117] "RemoveContainer" containerID="8449acd0a696ee69116edb9a8a5c0faa762bd8cbf814713242cac7adbab2008e" Dec 06 15:52:38 crc kubenswrapper[5003]: I1206 15:52:38.988868 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/horizon-6675bd755-2tpjz" Dec 06 15:52:39 crc kubenswrapper[5003]: I1206 15:52:39.032438 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/horizon-6675bd755-2tpjz"] Dec 06 15:52:39 crc kubenswrapper[5003]: I1206 15:52:39.048760 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["horizon-kuttl-tests/horizon-6675bd755-2tpjz"] Dec 06 15:52:39 crc kubenswrapper[5003]: I1206 15:52:39.192153 5003 scope.go:117] "RemoveContainer" containerID="4d627c747e76ad4f32ac8caaab128a79f30f78c4a3de60f9f5e7bd950ba47cc0" Dec 06 15:52:39 crc kubenswrapper[5003]: I1206 15:52:39.723276 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b85f3e6-54eb-4b5a-8f44-6614366478c2" path="/var/lib/kubelet/pods/7b85f3e6-54eb-4b5a-8f44-6614366478c2/volumes" Dec 06 15:52:41 crc kubenswrapper[5003]: I1206 15:52:41.282564 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/horizon-8bb8556c5-r5b4t" Dec 06 15:52:41 crc kubenswrapper[5003]: I1206 15:52:41.395704 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4-logs\") pod \"d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4\" (UID: \"d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4\") " Dec 06 15:52:41 crc kubenswrapper[5003]: I1206 15:52:41.395790 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4-scripts\") pod \"d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4\" (UID: \"d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4\") " Dec 06 15:52:41 crc kubenswrapper[5003]: I1206 15:52:41.395842 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ls5rn\" (UniqueName: \"kubernetes.io/projected/d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4-kube-api-access-ls5rn\") pod \"d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4\" (UID: \"d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4\") " Dec 06 15:52:41 crc kubenswrapper[5003]: I1206 15:52:41.395890 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4-horizon-secret-key\") pod \"d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4\" (UID: \"d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4\") " Dec 06 15:52:41 crc kubenswrapper[5003]: I1206 15:52:41.395948 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4-config-data\") pod \"d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4\" (UID: \"d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4\") " Dec 06 15:52:41 crc kubenswrapper[5003]: I1206 15:52:41.396902 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4-logs" (OuterVolumeSpecName: "logs") pod "d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4" (UID: "d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:52:41 crc kubenswrapper[5003]: I1206 15:52:41.397283 5003 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4-logs\") on node \"crc\" DevicePath \"\"" Dec 06 15:52:41 crc kubenswrapper[5003]: I1206 15:52:41.401752 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4-kube-api-access-ls5rn" (OuterVolumeSpecName: "kube-api-access-ls5rn") pod "d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4" (UID: "d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4"). InnerVolumeSpecName "kube-api-access-ls5rn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:52:41 crc kubenswrapper[5003]: I1206 15:52:41.402243 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4" (UID: "d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:52:41 crc kubenswrapper[5003]: I1206 15:52:41.413007 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4-scripts" (OuterVolumeSpecName: "scripts") pod "d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4" (UID: "d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:52:41 crc kubenswrapper[5003]: I1206 15:52:41.423754 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4-config-data" (OuterVolumeSpecName: "config-data") pod "d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4" (UID: "d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:52:41 crc kubenswrapper[5003]: I1206 15:52:41.498796 5003 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 06 15:52:41 crc kubenswrapper[5003]: I1206 15:52:41.498898 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 15:52:41 crc kubenswrapper[5003]: I1206 15:52:41.498920 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 15:52:41 crc kubenswrapper[5003]: I1206 15:52:41.498936 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ls5rn\" (UniqueName: \"kubernetes.io/projected/d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4-kube-api-access-ls5rn\") on node \"crc\" DevicePath \"\"" Dec 06 15:52:42 crc kubenswrapper[5003]: I1206 15:52:42.012888 5003 generic.go:334] "Generic (PLEG): container finished" podID="d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4" containerID="e468e6893d1874e63d8d54ae8a7def3d20e08fcab2895d185fd78404468510a0" exitCode=137 Dec 06 15:52:42 crc kubenswrapper[5003]: I1206 15:52:42.012952 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-8bb8556c5-r5b4t" event={"ID":"d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4","Type":"ContainerDied","Data":"e468e6893d1874e63d8d54ae8a7def3d20e08fcab2895d185fd78404468510a0"} Dec 06 15:52:42 crc kubenswrapper[5003]: I1206 15:52:42.013002 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-8bb8556c5-r5b4t" event={"ID":"d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4","Type":"ContainerDied","Data":"30c7e7033a50894667f352dcf7bcb7a781ce980d5f7cba23b9dc03e1c8fba006"} Dec 06 15:52:42 crc kubenswrapper[5003]: I1206 15:52:42.013039 5003 scope.go:117] "RemoveContainer" containerID="e7ab6e18d000fecd82f5425cabab6420e08be9fe5fb7152b7ddbf297d280d531" Dec 06 15:52:42 crc kubenswrapper[5003]: I1206 15:52:42.013064 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/horizon-8bb8556c5-r5b4t" Dec 06 15:52:42 crc kubenswrapper[5003]: I1206 15:52:42.055709 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/horizon-8bb8556c5-r5b4t"] Dec 06 15:52:42 crc kubenswrapper[5003]: I1206 15:52:42.066600 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["horizon-kuttl-tests/horizon-8bb8556c5-r5b4t"] Dec 06 15:52:42 crc kubenswrapper[5003]: I1206 15:52:42.238652 5003 scope.go:117] "RemoveContainer" containerID="e468e6893d1874e63d8d54ae8a7def3d20e08fcab2895d185fd78404468510a0" Dec 06 15:52:42 crc kubenswrapper[5003]: I1206 15:52:42.260644 5003 scope.go:117] "RemoveContainer" containerID="e7ab6e18d000fecd82f5425cabab6420e08be9fe5fb7152b7ddbf297d280d531" Dec 06 15:52:42 crc kubenswrapper[5003]: E1206 15:52:42.261249 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7ab6e18d000fecd82f5425cabab6420e08be9fe5fb7152b7ddbf297d280d531\": container with ID starting with e7ab6e18d000fecd82f5425cabab6420e08be9fe5fb7152b7ddbf297d280d531 not found: ID does not exist" containerID="e7ab6e18d000fecd82f5425cabab6420e08be9fe5fb7152b7ddbf297d280d531" Dec 06 15:52:42 crc kubenswrapper[5003]: I1206 15:52:42.261320 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7ab6e18d000fecd82f5425cabab6420e08be9fe5fb7152b7ddbf297d280d531"} err="failed to get container status \"e7ab6e18d000fecd82f5425cabab6420e08be9fe5fb7152b7ddbf297d280d531\": rpc error: code = NotFound desc = could not find container \"e7ab6e18d000fecd82f5425cabab6420e08be9fe5fb7152b7ddbf297d280d531\": container with ID starting with e7ab6e18d000fecd82f5425cabab6420e08be9fe5fb7152b7ddbf297d280d531 not found: ID does not exist" Dec 06 15:52:42 crc kubenswrapper[5003]: I1206 15:52:42.261372 5003 scope.go:117] "RemoveContainer" containerID="e468e6893d1874e63d8d54ae8a7def3d20e08fcab2895d185fd78404468510a0" Dec 06 15:52:42 crc kubenswrapper[5003]: E1206 15:52:42.261931 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e468e6893d1874e63d8d54ae8a7def3d20e08fcab2895d185fd78404468510a0\": container with ID starting with e468e6893d1874e63d8d54ae8a7def3d20e08fcab2895d185fd78404468510a0 not found: ID does not exist" containerID="e468e6893d1874e63d8d54ae8a7def3d20e08fcab2895d185fd78404468510a0" Dec 06 15:52:42 crc kubenswrapper[5003]: I1206 15:52:42.261963 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e468e6893d1874e63d8d54ae8a7def3d20e08fcab2895d185fd78404468510a0"} err="failed to get container status \"e468e6893d1874e63d8d54ae8a7def3d20e08fcab2895d185fd78404468510a0\": rpc error: code = NotFound desc = could not find container \"e468e6893d1874e63d8d54ae8a7def3d20e08fcab2895d185fd78404468510a0\": container with ID starting with e468e6893d1874e63d8d54ae8a7def3d20e08fcab2895d185fd78404468510a0 not found: ID does not exist" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.139531 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/horizon-5b545c459d-zq4qt"] Dec 06 15:52:43 crc kubenswrapper[5003]: E1206 15:52:43.139777 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b85f3e6-54eb-4b5a-8f44-6614366478c2" containerName="horizon-log" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.139789 5003 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="7b85f3e6-54eb-4b5a-8f44-6614366478c2" containerName="horizon-log" Dec 06 15:52:43 crc kubenswrapper[5003]: E1206 15:52:43.139802 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4" containerName="horizon" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.139808 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4" containerName="horizon" Dec 06 15:52:43 crc kubenswrapper[5003]: E1206 15:52:43.139817 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b85f3e6-54eb-4b5a-8f44-6614366478c2" containerName="horizon" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.139823 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b85f3e6-54eb-4b5a-8f44-6614366478c2" containerName="horizon" Dec 06 15:52:43 crc kubenswrapper[5003]: E1206 15:52:43.139833 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4" containerName="horizon-log" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.139839 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4" containerName="horizon-log" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.139947 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4" containerName="horizon-log" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.139982 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b85f3e6-54eb-4b5a-8f44-6614366478c2" containerName="horizon" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.139994 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4" containerName="horizon" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.140002 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b85f3e6-54eb-4b5a-8f44-6614366478c2" containerName="horizon-log" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.140675 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.144722 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"horizon-kuttl-tests"/"horizon-scripts" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.144854 5003 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"horizon" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.144894 5003 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"combined-ca-bundle" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.144916 5003 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"cert-horizon-svc" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.144869 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"horizon-kuttl-tests"/"horizon-config-data" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.144864 5003 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"horizon-horizon-dockercfg-djkg7" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.164295 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/horizon-5b545c459d-zq4qt"] Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.176090 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0b60488-38ee-46bf-bc3f-20ab19554efd-horizon-tls-certs\") pod \"horizon-5b545c459d-zq4qt\" (UID: \"c0b60488-38ee-46bf-bc3f-20ab19554efd\") " pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.176153 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c0b60488-38ee-46bf-bc3f-20ab19554efd-config-data\") pod \"horizon-5b545c459d-zq4qt\" (UID: \"c0b60488-38ee-46bf-bc3f-20ab19554efd\") " pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.176180 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0b60488-38ee-46bf-bc3f-20ab19554efd-combined-ca-bundle\") pod \"horizon-5b545c459d-zq4qt\" (UID: \"c0b60488-38ee-46bf-bc3f-20ab19554efd\") " pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.176203 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c0b60488-38ee-46bf-bc3f-20ab19554efd-horizon-secret-key\") pod \"horizon-5b545c459d-zq4qt\" (UID: \"c0b60488-38ee-46bf-bc3f-20ab19554efd\") " pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.176224 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xqswz\" (UniqueName: \"kubernetes.io/projected/c0b60488-38ee-46bf-bc3f-20ab19554efd-kube-api-access-xqswz\") pod \"horizon-5b545c459d-zq4qt\" (UID: \"c0b60488-38ee-46bf-bc3f-20ab19554efd\") " pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.176250 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/c0b60488-38ee-46bf-bc3f-20ab19554efd-logs\") pod \"horizon-5b545c459d-zq4qt\" (UID: \"c0b60488-38ee-46bf-bc3f-20ab19554efd\") " pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.176268 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c0b60488-38ee-46bf-bc3f-20ab19554efd-scripts\") pod \"horizon-5b545c459d-zq4qt\" (UID: \"c0b60488-38ee-46bf-bc3f-20ab19554efd\") " pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.198799 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct"] Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.200110 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.216381 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct"] Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.276891 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-scripts\") pod \"horizon-579fd4dcd4-sb9ct\" (UID: \"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66\") " pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.277092 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-logs\") pod \"horizon-579fd4dcd4-sb9ct\" (UID: \"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66\") " pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.277166 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0b60488-38ee-46bf-bc3f-20ab19554efd-horizon-tls-certs\") pod \"horizon-5b545c459d-zq4qt\" (UID: \"c0b60488-38ee-46bf-bc3f-20ab19554efd\") " pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.277208 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c0b60488-38ee-46bf-bc3f-20ab19554efd-config-data\") pod \"horizon-5b545c459d-zq4qt\" (UID: \"c0b60488-38ee-46bf-bc3f-20ab19554efd\") " pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.277261 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0b60488-38ee-46bf-bc3f-20ab19554efd-combined-ca-bundle\") pod \"horizon-5b545c459d-zq4qt\" (UID: \"c0b60488-38ee-46bf-bc3f-20ab19554efd\") " pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.277289 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c0b60488-38ee-46bf-bc3f-20ab19554efd-horizon-secret-key\") pod \"horizon-5b545c459d-zq4qt\" (UID: \"c0b60488-38ee-46bf-bc3f-20ab19554efd\") " pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.277323 5003 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xqswz\" (UniqueName: \"kubernetes.io/projected/c0b60488-38ee-46bf-bc3f-20ab19554efd-kube-api-access-xqswz\") pod \"horizon-5b545c459d-zq4qt\" (UID: \"c0b60488-38ee-46bf-bc3f-20ab19554efd\") " pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.277393 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0b60488-38ee-46bf-bc3f-20ab19554efd-logs\") pod \"horizon-5b545c459d-zq4qt\" (UID: \"c0b60488-38ee-46bf-bc3f-20ab19554efd\") " pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.277425 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c0b60488-38ee-46bf-bc3f-20ab19554efd-scripts\") pod \"horizon-5b545c459d-zq4qt\" (UID: \"c0b60488-38ee-46bf-bc3f-20ab19554efd\") " pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.277448 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p68lt\" (UniqueName: \"kubernetes.io/projected/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-kube-api-access-p68lt\") pod \"horizon-579fd4dcd4-sb9ct\" (UID: \"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66\") " pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.277708 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-horizon-tls-certs\") pod \"horizon-579fd4dcd4-sb9ct\" (UID: \"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66\") " pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.277740 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-horizon-secret-key\") pod \"horizon-579fd4dcd4-sb9ct\" (UID: \"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66\") " pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.278205 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-config-data\") pod \"horizon-579fd4dcd4-sb9ct\" (UID: \"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66\") " pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.278202 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0b60488-38ee-46bf-bc3f-20ab19554efd-logs\") pod \"horizon-5b545c459d-zq4qt\" (UID: \"c0b60488-38ee-46bf-bc3f-20ab19554efd\") " pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.278266 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-combined-ca-bundle\") pod \"horizon-579fd4dcd4-sb9ct\" (UID: \"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66\") " pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct" Dec 06 15:52:43 crc 
Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.278761 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c0b60488-38ee-46bf-bc3f-20ab19554efd-config-data\") pod \"horizon-5b545c459d-zq4qt\" (UID: \"c0b60488-38ee-46bf-bc3f-20ab19554efd\") " pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt"
Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.283033 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0b60488-38ee-46bf-bc3f-20ab19554efd-combined-ca-bundle\") pod \"horizon-5b545c459d-zq4qt\" (UID: \"c0b60488-38ee-46bf-bc3f-20ab19554efd\") " pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt"
Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.283072 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0b60488-38ee-46bf-bc3f-20ab19554efd-horizon-tls-certs\") pod \"horizon-5b545c459d-zq4qt\" (UID: \"c0b60488-38ee-46bf-bc3f-20ab19554efd\") " pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt"
Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.283525 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c0b60488-38ee-46bf-bc3f-20ab19554efd-horizon-secret-key\") pod \"horizon-5b545c459d-zq4qt\" (UID: \"c0b60488-38ee-46bf-bc3f-20ab19554efd\") " pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt"
Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.294941 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xqswz\" (UniqueName: \"kubernetes.io/projected/c0b60488-38ee-46bf-bc3f-20ab19554efd-kube-api-access-xqswz\") pod \"horizon-5b545c459d-zq4qt\" (UID: \"c0b60488-38ee-46bf-bc3f-20ab19554efd\") " pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt"
Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.379302 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-combined-ca-bundle\") pod \"horizon-579fd4dcd4-sb9ct\" (UID: \"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66\") " pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct"
Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.379581 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-scripts\") pod \"horizon-579fd4dcd4-sb9ct\" (UID: \"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66\") " pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct"
Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.379643 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-logs\") pod \"horizon-579fd4dcd4-sb9ct\" (UID: \"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66\") " pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct"
Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.379702 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p68lt\" (UniqueName: \"kubernetes.io/projected/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-kube-api-access-p68lt\") pod \"horizon-579fd4dcd4-sb9ct\" (UID: \"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66\") " pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct"
Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.379728 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-horizon-tls-certs\") pod \"horizon-579fd4dcd4-sb9ct\" (UID: \"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66\") " pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct"
Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.379754 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-horizon-secret-key\") pod \"horizon-579fd4dcd4-sb9ct\" (UID: \"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66\") " pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct"
Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.379782 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-config-data\") pod \"horizon-579fd4dcd4-sb9ct\" (UID: \"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66\") " pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct"
Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.380852 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-config-data\") pod \"horizon-579fd4dcd4-sb9ct\" (UID: \"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66\") " pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct"
Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.381184 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-scripts\") pod \"horizon-579fd4dcd4-sb9ct\" (UID: \"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66\") " pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct"
Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.381718 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-logs\") pod \"horizon-579fd4dcd4-sb9ct\" (UID: \"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66\") " pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct"
Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.383068 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-combined-ca-bundle\") pod \"horizon-579fd4dcd4-sb9ct\" (UID: \"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66\") " pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct"
Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.383533 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-horizon-secret-key\") pod \"horizon-579fd4dcd4-sb9ct\" (UID: \"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66\") " pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct"
Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.384617 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-horizon-tls-certs\") pod \"horizon-579fd4dcd4-sb9ct\" (UID: \"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66\") " pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct"
\"kubernetes.io/secret/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-horizon-tls-certs\") pod \"horizon-579fd4dcd4-sb9ct\" (UID: \"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66\") " pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.405455 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p68lt\" (UniqueName: \"kubernetes.io/projected/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-kube-api-access-p68lt\") pod \"horizon-579fd4dcd4-sb9ct\" (UID: \"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66\") " pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.457634 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.516682 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.730795 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4" path="/var/lib/kubelet/pods/d505c9e5-a4c0-4b9b-8725-3e7ca9dc82d4/volumes" Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.855798 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/horizon-5b545c459d-zq4qt"] Dec 06 15:52:43 crc kubenswrapper[5003]: W1206 15:52:43.859097 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc0b60488_38ee_46bf_bc3f_20ab19554efd.slice/crio-667f836981e59a1de0c2975f92aabcd73635f19eb659ff0fad5f12a4ff087f5c WatchSource:0}: Error finding container 667f836981e59a1de0c2975f92aabcd73635f19eb659ff0fad5f12a4ff087f5c: Status 404 returned error can't find the container with id 667f836981e59a1de0c2975f92aabcd73635f19eb659ff0fad5f12a4ff087f5c Dec 06 15:52:43 crc kubenswrapper[5003]: I1206 15:52:43.945056 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct"] Dec 06 15:52:43 crc kubenswrapper[5003]: W1206 15:52:43.949471 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode02d7ccc_fcad_4ca3_9d5f_f966c47c2a66.slice/crio-745c9608ffb284f72cd7c31556cf802c214ef207db399cf8dde5948f321c9fbb WatchSource:0}: Error finding container 745c9608ffb284f72cd7c31556cf802c214ef207db399cf8dde5948f321c9fbb: Status 404 returned error can't find the container with id 745c9608ffb284f72cd7c31556cf802c214ef207db399cf8dde5948f321c9fbb Dec 06 15:52:44 crc kubenswrapper[5003]: I1206 15:52:44.086976 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct" event={"ID":"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66","Type":"ContainerStarted","Data":"745c9608ffb284f72cd7c31556cf802c214ef207db399cf8dde5948f321c9fbb"} Dec 06 15:52:44 crc kubenswrapper[5003]: I1206 15:52:44.088653 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt" event={"ID":"c0b60488-38ee-46bf-bc3f-20ab19554efd","Type":"ContainerStarted","Data":"a4a5d34a1f8238edaa6db392a7453e6df5e89701d83e7a256ff063fc0a471aef"} Dec 06 15:52:44 crc kubenswrapper[5003]: I1206 15:52:44.088696 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt" 
event={"ID":"c0b60488-38ee-46bf-bc3f-20ab19554efd","Type":"ContainerStarted","Data":"667f836981e59a1de0c2975f92aabcd73635f19eb659ff0fad5f12a4ff087f5c"} Dec 06 15:52:45 crc kubenswrapper[5003]: I1206 15:52:45.097251 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct" event={"ID":"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66","Type":"ContainerStarted","Data":"6c2e752f888793d65d8000ddeffa4cd119cd51c58b44170649e7f8ae9748aa08"} Dec 06 15:52:45 crc kubenswrapper[5003]: I1206 15:52:45.097580 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct" event={"ID":"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66","Type":"ContainerStarted","Data":"ee4731862ae6218d41e41d8e19f42e3142adc2d273e54901e45cd37ee946737e"} Dec 06 15:52:45 crc kubenswrapper[5003]: I1206 15:52:45.102450 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt" event={"ID":"c0b60488-38ee-46bf-bc3f-20ab19554efd","Type":"ContainerStarted","Data":"c5c8c7f969cd45cf4bd68be5c741daf4a619f027d5218ad5ba25022f57f940ef"} Dec 06 15:52:45 crc kubenswrapper[5003]: I1206 15:52:45.127759 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct" podStartSLOduration=2.127729589 podStartE2EDuration="2.127729589s" podCreationTimestamp="2025-12-06 15:52:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:52:45.117754646 +0000 UTC m=+1243.651109057" watchObservedRunningTime="2025-12-06 15:52:45.127729589 +0000 UTC m=+1243.661083990" Dec 06 15:52:45 crc kubenswrapper[5003]: I1206 15:52:45.154101 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt" podStartSLOduration=2.154080473 podStartE2EDuration="2.154080473s" podCreationTimestamp="2025-12-06 15:52:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:52:45.151154931 +0000 UTC m=+1243.684509342" watchObservedRunningTime="2025-12-06 15:52:45.154080473 +0000 UTC m=+1243.687434874" Dec 06 15:52:53 crc kubenswrapper[5003]: I1206 15:52:53.458661 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt" Dec 06 15:52:53 crc kubenswrapper[5003]: I1206 15:52:53.459196 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt" Dec 06 15:52:53 crc kubenswrapper[5003]: I1206 15:52:53.517466 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct" Dec 06 15:52:53 crc kubenswrapper[5003]: I1206 15:52:53.518306 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct" Dec 06 15:53:05 crc kubenswrapper[5003]: I1206 15:53:05.185659 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct" Dec 06 15:53:05 crc kubenswrapper[5003]: I1206 15:53:05.234571 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt" Dec 06 15:53:06 crc kubenswrapper[5003]: I1206 15:53:06.869860 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt" Dec 06 15:53:06 crc kubenswrapper[5003]: I1206 15:53:06.909113 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct" Dec 06 15:53:06 crc kubenswrapper[5003]: I1206 15:53:06.972340 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/horizon-5b545c459d-zq4qt"] Dec 06 15:53:07 crc kubenswrapper[5003]: I1206 15:53:07.292051 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt" podUID="c0b60488-38ee-46bf-bc3f-20ab19554efd" containerName="horizon-log" containerID="cri-o://a4a5d34a1f8238edaa6db392a7453e6df5e89701d83e7a256ff063fc0a471aef" gracePeriod=30 Dec 06 15:53:07 crc kubenswrapper[5003]: I1206 15:53:07.292142 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt" podUID="c0b60488-38ee-46bf-bc3f-20ab19554efd" containerName="horizon" containerID="cri-o://c5c8c7f969cd45cf4bd68be5c741daf4a619f027d5218ad5ba25022f57f940ef" gracePeriod=30 Dec 06 15:53:07 crc kubenswrapper[5003]: I1206 15:53:07.424525 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct"] Dec 06 15:53:07 crc kubenswrapper[5003]: I1206 15:53:07.424782 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct" podUID="e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66" containerName="horizon-log" containerID="cri-o://6c2e752f888793d65d8000ddeffa4cd119cd51c58b44170649e7f8ae9748aa08" gracePeriod=30 Dec 06 15:53:07 crc kubenswrapper[5003]: I1206 15:53:07.424928 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct" podUID="e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66" containerName="horizon" containerID="cri-o://ee4731862ae6218d41e41d8e19f42e3142adc2d273e54901e45cd37ee946737e" gracePeriod=30 Dec 06 15:53:11 crc kubenswrapper[5003]: I1206 15:53:11.334431 5003 generic.go:334] "Generic (PLEG): container finished" podID="e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66" containerID="ee4731862ae6218d41e41d8e19f42e3142adc2d273e54901e45cd37ee946737e" exitCode=0 Dec 06 15:53:11 crc kubenswrapper[5003]: I1206 15:53:11.334523 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct" event={"ID":"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66","Type":"ContainerDied","Data":"ee4731862ae6218d41e41d8e19f42e3142adc2d273e54901e45cd37ee946737e"} Dec 06 15:53:11 crc kubenswrapper[5003]: I1206 15:53:11.336330 5003 generic.go:334] "Generic (PLEG): container finished" podID="c0b60488-38ee-46bf-bc3f-20ab19554efd" containerID="c5c8c7f969cd45cf4bd68be5c741daf4a619f027d5218ad5ba25022f57f940ef" exitCode=0 Dec 06 15:53:11 crc kubenswrapper[5003]: I1206 15:53:11.336361 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt" event={"ID":"c0b60488-38ee-46bf-bc3f-20ab19554efd","Type":"ContainerDied","Data":"c5c8c7f969cd45cf4bd68be5c741daf4a619f027d5218ad5ba25022f57f940ef"} Dec 06 15:53:13 crc kubenswrapper[5003]: I1206 15:53:13.458541 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt" podUID="c0b60488-38ee-46bf-bc3f-20ab19554efd" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.88:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.88:8443: 
connect: connection refused" Dec 06 15:53:13 crc kubenswrapper[5003]: I1206 15:53:13.518979 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct" podUID="e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.89:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.89:8443: connect: connection refused" Dec 06 15:53:18 crc kubenswrapper[5003]: I1206 15:53:18.573246 5003 patch_prober.go:28] interesting pod/machine-config-daemon-w25db container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 15:53:18 crc kubenswrapper[5003]: I1206 15:53:18.573945 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 15:53:23 crc kubenswrapper[5003]: I1206 15:53:23.459008 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt" podUID="c0b60488-38ee-46bf-bc3f-20ab19554efd" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.88:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.88:8443: connect: connection refused" Dec 06 15:53:23 crc kubenswrapper[5003]: I1206 15:53:23.517463 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct" podUID="e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.89:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.89:8443: connect: connection refused" Dec 06 15:53:33 crc kubenswrapper[5003]: I1206 15:53:33.459775 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt" podUID="c0b60488-38ee-46bf-bc3f-20ab19554efd" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.88:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.88:8443: connect: connection refused" Dec 06 15:53:33 crc kubenswrapper[5003]: I1206 15:53:33.460389 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt" Dec 06 15:53:33 crc kubenswrapper[5003]: I1206 15:53:33.517690 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct" podUID="e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.89:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.89:8443: connect: connection refused" Dec 06 15:53:33 crc kubenswrapper[5003]: I1206 15:53:33.517807 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct" Dec 06 15:53:37 crc kubenswrapper[5003]: I1206 15:53:37.522368 5003 generic.go:334] "Generic (PLEG): container finished" podID="e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66" containerID="6c2e752f888793d65d8000ddeffa4cd119cd51c58b44170649e7f8ae9748aa08" exitCode=137 Dec 06 15:53:37 crc kubenswrapper[5003]: I1206 15:53:37.522553 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct" event={"ID":"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66","Type":"ContainerDied","Data":"6c2e752f888793d65d8000ddeffa4cd119cd51c58b44170649e7f8ae9748aa08"} Dec 06 15:53:37 crc kubenswrapper[5003]: I1206 15:53:37.525086 5003 generic.go:334] "Generic (PLEG): container finished" podID="c0b60488-38ee-46bf-bc3f-20ab19554efd" containerID="a4a5d34a1f8238edaa6db392a7453e6df5e89701d83e7a256ff063fc0a471aef" exitCode=137 Dec 06 15:53:37 crc kubenswrapper[5003]: I1206 15:53:37.525130 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt" event={"ID":"c0b60488-38ee-46bf-bc3f-20ab19554efd","Type":"ContainerDied","Data":"a4a5d34a1f8238edaa6db392a7453e6df5e89701d83e7a256ff063fc0a471aef"} Dec 06 15:53:37 crc kubenswrapper[5003]: I1206 15:53:37.733766 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct" Dec 06 15:53:37 crc kubenswrapper[5003]: I1206 15:53:37.839352 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-logs\") pod \"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66\" (UID: \"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66\") " Dec 06 15:53:37 crc kubenswrapper[5003]: I1206 15:53:37.839404 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-combined-ca-bundle\") pod \"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66\" (UID: \"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66\") " Dec 06 15:53:37 crc kubenswrapper[5003]: I1206 15:53:37.839449 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-scripts\") pod \"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66\" (UID: \"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66\") " Dec 06 15:53:37 crc kubenswrapper[5003]: I1206 15:53:37.839577 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-horizon-tls-certs\") pod \"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66\" (UID: \"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66\") " Dec 06 15:53:37 crc kubenswrapper[5003]: I1206 15:53:37.839644 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p68lt\" (UniqueName: \"kubernetes.io/projected/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-kube-api-access-p68lt\") pod \"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66\" (UID: \"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66\") " Dec 06 15:53:37 crc kubenswrapper[5003]: I1206 15:53:37.839701 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-horizon-secret-key\") pod \"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66\" (UID: \"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66\") " Dec 06 15:53:37 crc kubenswrapper[5003]: I1206 15:53:37.839728 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-config-data\") pod \"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66\" (UID: \"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66\") " Dec 06 15:53:37 crc kubenswrapper[5003]: I1206 15:53:37.840784 5003 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-logs" (OuterVolumeSpecName: "logs") pod "e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66" (UID: "e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:53:37 crc kubenswrapper[5003]: I1206 15:53:37.854243 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66" (UID: "e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:53:37 crc kubenswrapper[5003]: I1206 15:53:37.854261 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-kube-api-access-p68lt" (OuterVolumeSpecName: "kube-api-access-p68lt") pod "e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66" (UID: "e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66"). InnerVolumeSpecName "kube-api-access-p68lt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:53:37 crc kubenswrapper[5003]: I1206 15:53:37.856925 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-scripts" (OuterVolumeSpecName: "scripts") pod "e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66" (UID: "e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:53:37 crc kubenswrapper[5003]: I1206 15:53:37.857112 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-config-data" (OuterVolumeSpecName: "config-data") pod "e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66" (UID: "e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:53:37 crc kubenswrapper[5003]: I1206 15:53:37.861208 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66" (UID: "e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:53:37 crc kubenswrapper[5003]: I1206 15:53:37.881717 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66" (UID: "e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66"). InnerVolumeSpecName "horizon-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:53:37 crc kubenswrapper[5003]: I1206 15:53:37.941342 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p68lt\" (UniqueName: \"kubernetes.io/projected/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-kube-api-access-p68lt\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:37 crc kubenswrapper[5003]: I1206 15:53:37.941709 5003 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:37 crc kubenswrapper[5003]: I1206 15:53:37.941721 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:37 crc kubenswrapper[5003]: I1206 15:53:37.941732 5003 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-logs\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:37 crc kubenswrapper[5003]: I1206 15:53:37.941745 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:37 crc kubenswrapper[5003]: I1206 15:53:37.941756 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:37 crc kubenswrapper[5003]: I1206 15:53:37.941766 5003 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:38 crc kubenswrapper[5003]: I1206 15:53:38.021066 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt" Dec 06 15:53:38 crc kubenswrapper[5003]: I1206 15:53:38.144237 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c0b60488-38ee-46bf-bc3f-20ab19554efd-horizon-secret-key\") pod \"c0b60488-38ee-46bf-bc3f-20ab19554efd\" (UID: \"c0b60488-38ee-46bf-bc3f-20ab19554efd\") " Dec 06 15:53:38 crc kubenswrapper[5003]: I1206 15:53:38.145060 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c0b60488-38ee-46bf-bc3f-20ab19554efd-scripts\") pod \"c0b60488-38ee-46bf-bc3f-20ab19554efd\" (UID: \"c0b60488-38ee-46bf-bc3f-20ab19554efd\") " Dec 06 15:53:38 crc kubenswrapper[5003]: I1206 15:53:38.145115 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0b60488-38ee-46bf-bc3f-20ab19554efd-combined-ca-bundle\") pod \"c0b60488-38ee-46bf-bc3f-20ab19554efd\" (UID: \"c0b60488-38ee-46bf-bc3f-20ab19554efd\") " Dec 06 15:53:38 crc kubenswrapper[5003]: I1206 15:53:38.145147 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0b60488-38ee-46bf-bc3f-20ab19554efd-horizon-tls-certs\") pod \"c0b60488-38ee-46bf-bc3f-20ab19554efd\" (UID: \"c0b60488-38ee-46bf-bc3f-20ab19554efd\") " Dec 06 15:53:38 crc kubenswrapper[5003]: I1206 15:53:38.145257 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0b60488-38ee-46bf-bc3f-20ab19554efd-logs\") pod \"c0b60488-38ee-46bf-bc3f-20ab19554efd\" (UID: \"c0b60488-38ee-46bf-bc3f-20ab19554efd\") " Dec 06 15:53:38 crc kubenswrapper[5003]: I1206 15:53:38.145291 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xqswz\" (UniqueName: \"kubernetes.io/projected/c0b60488-38ee-46bf-bc3f-20ab19554efd-kube-api-access-xqswz\") pod \"c0b60488-38ee-46bf-bc3f-20ab19554efd\" (UID: \"c0b60488-38ee-46bf-bc3f-20ab19554efd\") " Dec 06 15:53:38 crc kubenswrapper[5003]: I1206 15:53:38.145377 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c0b60488-38ee-46bf-bc3f-20ab19554efd-config-data\") pod \"c0b60488-38ee-46bf-bc3f-20ab19554efd\" (UID: \"c0b60488-38ee-46bf-bc3f-20ab19554efd\") " Dec 06 15:53:38 crc kubenswrapper[5003]: I1206 15:53:38.146019 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0b60488-38ee-46bf-bc3f-20ab19554efd-logs" (OuterVolumeSpecName: "logs") pod "c0b60488-38ee-46bf-bc3f-20ab19554efd" (UID: "c0b60488-38ee-46bf-bc3f-20ab19554efd"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:53:38 crc kubenswrapper[5003]: I1206 15:53:38.148903 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0b60488-38ee-46bf-bc3f-20ab19554efd-kube-api-access-xqswz" (OuterVolumeSpecName: "kube-api-access-xqswz") pod "c0b60488-38ee-46bf-bc3f-20ab19554efd" (UID: "c0b60488-38ee-46bf-bc3f-20ab19554efd"). InnerVolumeSpecName "kube-api-access-xqswz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:53:38 crc kubenswrapper[5003]: I1206 15:53:38.148917 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0b60488-38ee-46bf-bc3f-20ab19554efd-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "c0b60488-38ee-46bf-bc3f-20ab19554efd" (UID: "c0b60488-38ee-46bf-bc3f-20ab19554efd"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:53:38 crc kubenswrapper[5003]: I1206 15:53:38.160017 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0b60488-38ee-46bf-bc3f-20ab19554efd-config-data" (OuterVolumeSpecName: "config-data") pod "c0b60488-38ee-46bf-bc3f-20ab19554efd" (UID: "c0b60488-38ee-46bf-bc3f-20ab19554efd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:53:38 crc kubenswrapper[5003]: I1206 15:53:38.214253 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0b60488-38ee-46bf-bc3f-20ab19554efd-scripts" (OuterVolumeSpecName: "scripts") pod "c0b60488-38ee-46bf-bc3f-20ab19554efd" (UID: "c0b60488-38ee-46bf-bc3f-20ab19554efd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:53:38 crc kubenswrapper[5003]: I1206 15:53:38.215944 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0b60488-38ee-46bf-bc3f-20ab19554efd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c0b60488-38ee-46bf-bc3f-20ab19554efd" (UID: "c0b60488-38ee-46bf-bc3f-20ab19554efd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:53:38 crc kubenswrapper[5003]: I1206 15:53:38.232197 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0b60488-38ee-46bf-bc3f-20ab19554efd-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "c0b60488-38ee-46bf-bc3f-20ab19554efd" (UID: "c0b60488-38ee-46bf-bc3f-20ab19554efd"). InnerVolumeSpecName "horizon-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:53:38 crc kubenswrapper[5003]: I1206 15:53:38.247241 5003 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c0b60488-38ee-46bf-bc3f-20ab19554efd-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:38 crc kubenswrapper[5003]: I1206 15:53:38.247290 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c0b60488-38ee-46bf-bc3f-20ab19554efd-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:38 crc kubenswrapper[5003]: I1206 15:53:38.247303 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0b60488-38ee-46bf-bc3f-20ab19554efd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:38 crc kubenswrapper[5003]: I1206 15:53:38.247315 5003 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0b60488-38ee-46bf-bc3f-20ab19554efd-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:38 crc kubenswrapper[5003]: I1206 15:53:38.247327 5003 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0b60488-38ee-46bf-bc3f-20ab19554efd-logs\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:38 crc kubenswrapper[5003]: I1206 15:53:38.247339 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xqswz\" (UniqueName: \"kubernetes.io/projected/c0b60488-38ee-46bf-bc3f-20ab19554efd-kube-api-access-xqswz\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:38 crc kubenswrapper[5003]: I1206 15:53:38.247354 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c0b60488-38ee-46bf-bc3f-20ab19554efd-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:38 crc kubenswrapper[5003]: I1206 15:53:38.535261 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct" event={"ID":"e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66","Type":"ContainerDied","Data":"745c9608ffb284f72cd7c31556cf802c214ef207db399cf8dde5948f321c9fbb"} Dec 06 15:53:38 crc kubenswrapper[5003]: I1206 15:53:38.535504 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct" Dec 06 15:53:38 crc kubenswrapper[5003]: I1206 15:53:38.536408 5003 scope.go:117] "RemoveContainer" containerID="ee4731862ae6218d41e41d8e19f42e3142adc2d273e54901e45cd37ee946737e" Dec 06 15:53:38 crc kubenswrapper[5003]: I1206 15:53:38.541904 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt" event={"ID":"c0b60488-38ee-46bf-bc3f-20ab19554efd","Type":"ContainerDied","Data":"667f836981e59a1de0c2975f92aabcd73635f19eb659ff0fad5f12a4ff087f5c"} Dec 06 15:53:38 crc kubenswrapper[5003]: I1206 15:53:38.541990 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/horizon-5b545c459d-zq4qt" Dec 06 15:53:38 crc kubenswrapper[5003]: I1206 15:53:38.585569 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct"] Dec 06 15:53:38 crc kubenswrapper[5003]: I1206 15:53:38.593447 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["horizon-kuttl-tests/horizon-579fd4dcd4-sb9ct"] Dec 06 15:53:38 crc kubenswrapper[5003]: I1206 15:53:38.601443 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/horizon-5b545c459d-zq4qt"] Dec 06 15:53:38 crc kubenswrapper[5003]: I1206 15:53:38.605994 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["horizon-kuttl-tests/horizon-5b545c459d-zq4qt"] Dec 06 15:53:38 crc kubenswrapper[5003]: I1206 15:53:38.701185 5003 scope.go:117] "RemoveContainer" containerID="6c2e752f888793d65d8000ddeffa4cd119cd51c58b44170649e7f8ae9748aa08" Dec 06 15:53:38 crc kubenswrapper[5003]: I1206 15:53:38.725440 5003 scope.go:117] "RemoveContainer" containerID="c5c8c7f969cd45cf4bd68be5c741daf4a619f027d5218ad5ba25022f57f940ef" Dec 06 15:53:38 crc kubenswrapper[5003]: I1206 15:53:38.880659 5003 scope.go:117] "RemoveContainer" containerID="a4a5d34a1f8238edaa6db392a7453e6df5e89701d83e7a256ff063fc0a471aef" Dec 06 15:53:39 crc kubenswrapper[5003]: I1206 15:53:39.723859 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0b60488-38ee-46bf-bc3f-20ab19554efd" path="/var/lib/kubelet/pods/c0b60488-38ee-46bf-bc3f-20ab19554efd/volumes" Dec 06 15:53:39 crc kubenswrapper[5003]: I1206 15:53:39.725163 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66" path="/var/lib/kubelet/pods/e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66/volumes" Dec 06 15:53:45 crc kubenswrapper[5003]: E1206 15:53:45.094363 5003 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.73:46660->38.102.83.73:37311: write tcp 38.102.83.73:46660->38.102.83.73:37311: write: broken pipe Dec 06 15:53:45 crc kubenswrapper[5003]: I1206 15:53:45.971115 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/keystone-bootstrap-hvc5l"] Dec 06 15:53:45 crc kubenswrapper[5003]: I1206 15:53:45.981457 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/keystone-db-sync-s6w7z"] Dec 06 15:53:45 crc kubenswrapper[5003]: I1206 15:53:45.983654 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["horizon-kuttl-tests/keystone-db-sync-s6w7z"] Dec 06 15:53:45 crc kubenswrapper[5003]: I1206 15:53:45.988231 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["horizon-kuttl-tests/keystone-bootstrap-hvc5l"] Dec 06 15:53:45 crc kubenswrapper[5003]: I1206 15:53:45.993309 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/keystone-bd6cbdc78-tjc97"] Dec 06 15:53:45 crc kubenswrapper[5003]: I1206 15:53:45.993770 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="horizon-kuttl-tests/keystone-bd6cbdc78-tjc97" podUID="1e075c35-aaca-468e-9276-0ce9bcb6a394" containerName="keystone-api" containerID="cri-o://a30cc65566f336c0c5a6b7bfe8ed6f2360be9b5ffd9ccbcd57ef0af0d7ef0b91" gracePeriod=30 Dec 06 15:53:46 crc kubenswrapper[5003]: I1206 15:53:46.029424 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/keystonefdf1-account-delete-mqw6h"] Dec 06 15:53:46 crc kubenswrapper[5003]: E1206 15:53:46.030001 5003 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66" containerName="horizon-log" Dec 06 15:53:46 crc kubenswrapper[5003]: I1206 15:53:46.030102 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66" containerName="horizon-log" Dec 06 15:53:46 crc kubenswrapper[5003]: E1206 15:53:46.030208 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0b60488-38ee-46bf-bc3f-20ab19554efd" containerName="horizon-log" Dec 06 15:53:46 crc kubenswrapper[5003]: I1206 15:53:46.030309 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0b60488-38ee-46bf-bc3f-20ab19554efd" containerName="horizon-log" Dec 06 15:53:46 crc kubenswrapper[5003]: E1206 15:53:46.030406 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0b60488-38ee-46bf-bc3f-20ab19554efd" containerName="horizon" Dec 06 15:53:46 crc kubenswrapper[5003]: I1206 15:53:46.030525 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0b60488-38ee-46bf-bc3f-20ab19554efd" containerName="horizon" Dec 06 15:53:46 crc kubenswrapper[5003]: E1206 15:53:46.030624 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66" containerName="horizon" Dec 06 15:53:46 crc kubenswrapper[5003]: I1206 15:53:46.030703 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66" containerName="horizon" Dec 06 15:53:46 crc kubenswrapper[5003]: I1206 15:53:46.030951 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66" containerName="horizon-log" Dec 06 15:53:46 crc kubenswrapper[5003]: I1206 15:53:46.031049 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e02d7ccc-fcad-4ca3-9d5f-f966c47c2a66" containerName="horizon" Dec 06 15:53:46 crc kubenswrapper[5003]: I1206 15:53:46.031133 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0b60488-38ee-46bf-bc3f-20ab19554efd" containerName="horizon-log" Dec 06 15:53:46 crc kubenswrapper[5003]: I1206 15:53:46.031218 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0b60488-38ee-46bf-bc3f-20ab19554efd" containerName="horizon" Dec 06 15:53:46 crc kubenswrapper[5003]: I1206 15:53:46.031850 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/keystonefdf1-account-delete-mqw6h" Dec 06 15:53:46 crc kubenswrapper[5003]: I1206 15:53:46.041193 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/keystonefdf1-account-delete-mqw6h"] Dec 06 15:53:46 crc kubenswrapper[5003]: I1206 15:53:46.173941 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ef1241df-7f65-49b6-b681-e332717e0d88-operator-scripts\") pod \"keystonefdf1-account-delete-mqw6h\" (UID: \"ef1241df-7f65-49b6-b681-e332717e0d88\") " pod="horizon-kuttl-tests/keystonefdf1-account-delete-mqw6h" Dec 06 15:53:46 crc kubenswrapper[5003]: I1206 15:53:46.174088 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zqpc\" (UniqueName: \"kubernetes.io/projected/ef1241df-7f65-49b6-b681-e332717e0d88-kube-api-access-4zqpc\") pod \"keystonefdf1-account-delete-mqw6h\" (UID: \"ef1241df-7f65-49b6-b681-e332717e0d88\") " pod="horizon-kuttl-tests/keystonefdf1-account-delete-mqw6h" Dec 06 15:53:46 crc kubenswrapper[5003]: I1206 15:53:46.275579 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zqpc\" (UniqueName: \"kubernetes.io/projected/ef1241df-7f65-49b6-b681-e332717e0d88-kube-api-access-4zqpc\") pod \"keystonefdf1-account-delete-mqw6h\" (UID: \"ef1241df-7f65-49b6-b681-e332717e0d88\") " pod="horizon-kuttl-tests/keystonefdf1-account-delete-mqw6h" Dec 06 15:53:46 crc kubenswrapper[5003]: I1206 15:53:46.275712 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ef1241df-7f65-49b6-b681-e332717e0d88-operator-scripts\") pod \"keystonefdf1-account-delete-mqw6h\" (UID: \"ef1241df-7f65-49b6-b681-e332717e0d88\") " pod="horizon-kuttl-tests/keystonefdf1-account-delete-mqw6h" Dec 06 15:53:46 crc kubenswrapper[5003]: I1206 15:53:46.276721 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ef1241df-7f65-49b6-b681-e332717e0d88-operator-scripts\") pod \"keystonefdf1-account-delete-mqw6h\" (UID: \"ef1241df-7f65-49b6-b681-e332717e0d88\") " pod="horizon-kuttl-tests/keystonefdf1-account-delete-mqw6h" Dec 06 15:53:46 crc kubenswrapper[5003]: I1206 15:53:46.294407 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zqpc\" (UniqueName: \"kubernetes.io/projected/ef1241df-7f65-49b6-b681-e332717e0d88-kube-api-access-4zqpc\") pod \"keystonefdf1-account-delete-mqw6h\" (UID: \"ef1241df-7f65-49b6-b681-e332717e0d88\") " pod="horizon-kuttl-tests/keystonefdf1-account-delete-mqw6h" Dec 06 15:53:46 crc kubenswrapper[5003]: I1206 15:53:46.346705 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/keystonefdf1-account-delete-mqw6h" Dec 06 15:53:46 crc kubenswrapper[5003]: I1206 15:53:46.622278 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/keystonefdf1-account-delete-mqw6h"] Dec 06 15:53:46 crc kubenswrapper[5003]: I1206 15:53:46.835569 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/openstack-galera-0"] Dec 06 15:53:46 crc kubenswrapper[5003]: I1206 15:53:46.843981 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/openstack-galera-2"] Dec 06 15:53:46 crc kubenswrapper[5003]: I1206 15:53:46.853885 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/openstack-galera-1"] Dec 06 15:53:46 crc kubenswrapper[5003]: I1206 15:53:46.969989 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="horizon-kuttl-tests/openstack-galera-2" podUID="6433e1ae-768c-42ba-b961-4bd7bfba8701" containerName="galera" containerID="cri-o://3be2e48606ac441812b02fb904997ab6234188ea0ee8feeb3fc904dacb84da5f" gracePeriod=30 Dec 06 15:53:47 crc kubenswrapper[5003]: I1206 15:53:47.518381 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/memcached-0"] Dec 06 15:53:47 crc kubenswrapper[5003]: I1206 15:53:47.518712 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="horizon-kuttl-tests/memcached-0" podUID="69802b43-d4de-4ef5-9e10-9405562de3e7" containerName="memcached" containerID="cri-o://f4a85997302246bacff4774e7dabbc039ee9f5e10350d2603860ea80dc4d7e84" gracePeriod=30 Dec 06 15:53:47 crc kubenswrapper[5003]: I1206 15:53:47.620773 5003 generic.go:334] "Generic (PLEG): container finished" podID="ef1241df-7f65-49b6-b681-e332717e0d88" containerID="172229dd1f03f4155e8b527ca2eaf99a22620f316a2949aaba99f0f0d4a1e5a3" exitCode=1 Dec 06 15:53:47 crc kubenswrapper[5003]: I1206 15:53:47.620980 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystonefdf1-account-delete-mqw6h" event={"ID":"ef1241df-7f65-49b6-b681-e332717e0d88","Type":"ContainerDied","Data":"172229dd1f03f4155e8b527ca2eaf99a22620f316a2949aaba99f0f0d4a1e5a3"} Dec 06 15:53:47 crc kubenswrapper[5003]: I1206 15:53:47.621049 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystonefdf1-account-delete-mqw6h" event={"ID":"ef1241df-7f65-49b6-b681-e332717e0d88","Type":"ContainerStarted","Data":"56d9c42eab74970835ebbbc3f0cb310f5cdbe3ab0fbde24cc47a407908185485"} Dec 06 15:53:47 crc kubenswrapper[5003]: I1206 15:53:47.621376 5003 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="horizon-kuttl-tests/keystonefdf1-account-delete-mqw6h" secret="" err="secret \"galera-openstack-dockercfg-5h6p7\" not found" Dec 06 15:53:47 crc kubenswrapper[5003]: I1206 15:53:47.621429 5003 scope.go:117] "RemoveContainer" containerID="172229dd1f03f4155e8b527ca2eaf99a22620f316a2949aaba99f0f0d4a1e5a3" Dec 06 15:53:47 crc kubenswrapper[5003]: I1206 15:53:47.624753 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-2" event={"ID":"6433e1ae-768c-42ba-b961-4bd7bfba8701","Type":"ContainerDied","Data":"3be2e48606ac441812b02fb904997ab6234188ea0ee8feeb3fc904dacb84da5f"} Dec 06 15:53:47 crc kubenswrapper[5003]: I1206 15:53:47.624729 5003 generic.go:334] "Generic (PLEG): container finished" podID="6433e1ae-768c-42ba-b961-4bd7bfba8701" containerID="3be2e48606ac441812b02fb904997ab6234188ea0ee8feeb3fc904dacb84da5f" exitCode=0 Dec 06 15:53:47 crc kubenswrapper[5003]: E1206 15:53:47.702904 5003 configmap.go:193] Couldn't get configMap horizon-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Dec 06 15:53:47 crc kubenswrapper[5003]: E1206 15:53:47.703137 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ef1241df-7f65-49b6-b681-e332717e0d88-operator-scripts podName:ef1241df-7f65-49b6-b681-e332717e0d88 nodeName:}" failed. No retries permitted until 2025-12-06 15:53:48.202971461 +0000 UTC m=+1306.736325842 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/ef1241df-7f65-49b6-b681-e332717e0d88-operator-scripts") pod "keystonefdf1-account-delete-mqw6h" (UID: "ef1241df-7f65-49b6-b681-e332717e0d88") : configmap "openstack-scripts" not found Dec 06 15:53:47 crc kubenswrapper[5003]: I1206 15:53:47.723584 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e76a0dac-849b-4bd7-90f5-f0f4d2a4382d" path="/var/lib/kubelet/pods/e76a0dac-849b-4bd7-90f5-f0f4d2a4382d/volumes" Dec 06 15:53:47 crc kubenswrapper[5003]: I1206 15:53:47.724252 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea87d520-aeaf-4255-8015-7ce5ef802967" path="/var/lib/kubelet/pods/ea87d520-aeaf-4255-8015-7ce5ef802967/volumes" Dec 06 15:53:47 crc kubenswrapper[5003]: I1206 15:53:47.777725 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/openstack-galera-2" Dec 06 15:53:47 crc kubenswrapper[5003]: I1206 15:53:47.803334 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"6433e1ae-768c-42ba-b961-4bd7bfba8701\" (UID: \"6433e1ae-768c-42ba-b961-4bd7bfba8701\") " Dec 06 15:53:47 crc kubenswrapper[5003]: I1206 15:53:47.803379 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qknf2\" (UniqueName: \"kubernetes.io/projected/6433e1ae-768c-42ba-b961-4bd7bfba8701-kube-api-access-qknf2\") pod \"6433e1ae-768c-42ba-b961-4bd7bfba8701\" (UID: \"6433e1ae-768c-42ba-b961-4bd7bfba8701\") " Dec 06 15:53:47 crc kubenswrapper[5003]: I1206 15:53:47.803425 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6433e1ae-768c-42ba-b961-4bd7bfba8701-operator-scripts\") pod \"6433e1ae-768c-42ba-b961-4bd7bfba8701\" (UID: \"6433e1ae-768c-42ba-b961-4bd7bfba8701\") " Dec 06 15:53:47 crc kubenswrapper[5003]: I1206 15:53:47.803479 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6433e1ae-768c-42ba-b961-4bd7bfba8701-config-data-default\") pod \"6433e1ae-768c-42ba-b961-4bd7bfba8701\" (UID: \"6433e1ae-768c-42ba-b961-4bd7bfba8701\") " Dec 06 15:53:47 crc kubenswrapper[5003]: I1206 15:53:47.803579 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6433e1ae-768c-42ba-b961-4bd7bfba8701-config-data-generated\") pod \"6433e1ae-768c-42ba-b961-4bd7bfba8701\" (UID: \"6433e1ae-768c-42ba-b961-4bd7bfba8701\") " Dec 06 15:53:47 crc kubenswrapper[5003]: I1206 15:53:47.803612 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6433e1ae-768c-42ba-b961-4bd7bfba8701-kolla-config\") pod \"6433e1ae-768c-42ba-b961-4bd7bfba8701\" (UID: \"6433e1ae-768c-42ba-b961-4bd7bfba8701\") " Dec 06 15:53:47 crc kubenswrapper[5003]: I1206 15:53:47.805043 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6433e1ae-768c-42ba-b961-4bd7bfba8701-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "6433e1ae-768c-42ba-b961-4bd7bfba8701" (UID: "6433e1ae-768c-42ba-b961-4bd7bfba8701"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:53:47 crc kubenswrapper[5003]: I1206 15:53:47.805200 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6433e1ae-768c-42ba-b961-4bd7bfba8701-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6433e1ae-768c-42ba-b961-4bd7bfba8701" (UID: "6433e1ae-768c-42ba-b961-4bd7bfba8701"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:53:47 crc kubenswrapper[5003]: I1206 15:53:47.805617 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6433e1ae-768c-42ba-b961-4bd7bfba8701-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "6433e1ae-768c-42ba-b961-4bd7bfba8701" (UID: "6433e1ae-768c-42ba-b961-4bd7bfba8701"). InnerVolumeSpecName "config-data-default". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:53:47 crc kubenswrapper[5003]: I1206 15:53:47.806071 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6433e1ae-768c-42ba-b961-4bd7bfba8701-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "6433e1ae-768c-42ba-b961-4bd7bfba8701" (UID: "6433e1ae-768c-42ba-b961-4bd7bfba8701"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:53:47 crc kubenswrapper[5003]: I1206 15:53:47.810642 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6433e1ae-768c-42ba-b961-4bd7bfba8701-kube-api-access-qknf2" (OuterVolumeSpecName: "kube-api-access-qknf2") pod "6433e1ae-768c-42ba-b961-4bd7bfba8701" (UID: "6433e1ae-768c-42ba-b961-4bd7bfba8701"). InnerVolumeSpecName "kube-api-access-qknf2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:53:47 crc kubenswrapper[5003]: I1206 15:53:47.815607 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "mysql-db") pod "6433e1ae-768c-42ba-b961-4bd7bfba8701" (UID: "6433e1ae-768c-42ba-b961-4bd7bfba8701"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 06 15:53:47 crc kubenswrapper[5003]: I1206 15:53:47.905233 5003 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Dec 06 15:53:47 crc kubenswrapper[5003]: I1206 15:53:47.905285 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qknf2\" (UniqueName: \"kubernetes.io/projected/6433e1ae-768c-42ba-b961-4bd7bfba8701-kube-api-access-qknf2\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:47 crc kubenswrapper[5003]: I1206 15:53:47.905297 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6433e1ae-768c-42ba-b961-4bd7bfba8701-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:47 crc kubenswrapper[5003]: I1206 15:53:47.905310 5003 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6433e1ae-768c-42ba-b961-4bd7bfba8701-config-data-default\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:47 crc kubenswrapper[5003]: I1206 15:53:47.905321 5003 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6433e1ae-768c-42ba-b961-4bd7bfba8701-config-data-generated\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:47 crc kubenswrapper[5003]: I1206 15:53:47.905331 5003 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6433e1ae-768c-42ba-b961-4bd7bfba8701-kolla-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:47 crc kubenswrapper[5003]: I1206 15:53:47.922057 5003 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Dec 06 15:53:47 crc kubenswrapper[5003]: I1206 15:53:47.976849 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/rabbitmq-server-0"] Dec 06 15:53:48 crc kubenswrapper[5003]: I1206 15:53:48.006815 5003 reconciler_common.go:293] "Volume detached for volume 
\"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:48 crc kubenswrapper[5003]: I1206 15:53:48.076596 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/rabbitmq-server-0"] Dec 06 15:53:48 crc kubenswrapper[5003]: E1206 15:53:48.209585 5003 configmap.go:193] Couldn't get configMap horizon-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Dec 06 15:53:48 crc kubenswrapper[5003]: E1206 15:53:48.210135 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ef1241df-7f65-49b6-b681-e332717e0d88-operator-scripts podName:ef1241df-7f65-49b6-b681-e332717e0d88 nodeName:}" failed. No retries permitted until 2025-12-06 15:53:49.210093524 +0000 UTC m=+1307.743447905 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/ef1241df-7f65-49b6-b681-e332717e0d88-operator-scripts") pod "keystonefdf1-account-delete-mqw6h" (UID: "ef1241df-7f65-49b6-b681-e332717e0d88") : configmap "openstack-scripts" not found Dec 06 15:53:48 crc kubenswrapper[5003]: I1206 15:53:48.572541 5003 patch_prober.go:28] interesting pod/machine-config-daemon-w25db container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 15:53:48 crc kubenswrapper[5003]: I1206 15:53:48.572608 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 15:53:48 crc kubenswrapper[5003]: I1206 15:53:48.594435 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/memcached-0" Dec 06 15:53:48 crc kubenswrapper[5003]: I1206 15:53:48.614833 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d25w5\" (UniqueName: \"kubernetes.io/projected/69802b43-d4de-4ef5-9e10-9405562de3e7-kube-api-access-d25w5\") pod \"69802b43-d4de-4ef5-9e10-9405562de3e7\" (UID: \"69802b43-d4de-4ef5-9e10-9405562de3e7\") " Dec 06 15:53:48 crc kubenswrapper[5003]: I1206 15:53:48.614896 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/69802b43-d4de-4ef5-9e10-9405562de3e7-kolla-config\") pod \"69802b43-d4de-4ef5-9e10-9405562de3e7\" (UID: \"69802b43-d4de-4ef5-9e10-9405562de3e7\") " Dec 06 15:53:48 crc kubenswrapper[5003]: I1206 15:53:48.614989 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/69802b43-d4de-4ef5-9e10-9405562de3e7-config-data\") pod \"69802b43-d4de-4ef5-9e10-9405562de3e7\" (UID: \"69802b43-d4de-4ef5-9e10-9405562de3e7\") " Dec 06 15:53:48 crc kubenswrapper[5003]: I1206 15:53:48.615773 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69802b43-d4de-4ef5-9e10-9405562de3e7-config-data" (OuterVolumeSpecName: "config-data") pod "69802b43-d4de-4ef5-9e10-9405562de3e7" (UID: "69802b43-d4de-4ef5-9e10-9405562de3e7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:53:48 crc kubenswrapper[5003]: I1206 15:53:48.615764 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69802b43-d4de-4ef5-9e10-9405562de3e7-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "69802b43-d4de-4ef5-9e10-9405562de3e7" (UID: "69802b43-d4de-4ef5-9e10-9405562de3e7"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:53:48 crc kubenswrapper[5003]: I1206 15:53:48.625755 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69802b43-d4de-4ef5-9e10-9405562de3e7-kube-api-access-d25w5" (OuterVolumeSpecName: "kube-api-access-d25w5") pod "69802b43-d4de-4ef5-9e10-9405562de3e7" (UID: "69802b43-d4de-4ef5-9e10-9405562de3e7"). InnerVolumeSpecName "kube-api-access-d25w5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:53:48 crc kubenswrapper[5003]: I1206 15:53:48.637006 5003 generic.go:334] "Generic (PLEG): container finished" podID="69802b43-d4de-4ef5-9e10-9405562de3e7" containerID="f4a85997302246bacff4774e7dabbc039ee9f5e10350d2603860ea80dc4d7e84" exitCode=0 Dec 06 15:53:48 crc kubenswrapper[5003]: I1206 15:53:48.637082 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/memcached-0" Dec 06 15:53:48 crc kubenswrapper[5003]: I1206 15:53:48.637120 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/memcached-0" event={"ID":"69802b43-d4de-4ef5-9e10-9405562de3e7","Type":"ContainerDied","Data":"f4a85997302246bacff4774e7dabbc039ee9f5e10350d2603860ea80dc4d7e84"} Dec 06 15:53:48 crc kubenswrapper[5003]: I1206 15:53:48.637151 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/memcached-0" event={"ID":"69802b43-d4de-4ef5-9e10-9405562de3e7","Type":"ContainerDied","Data":"5cb3ac3c117c6db88e2e379530af05ae8b64cf5e9a10e69445b954e4a6269a1b"} Dec 06 15:53:48 crc kubenswrapper[5003]: I1206 15:53:48.637172 5003 scope.go:117] "RemoveContainer" containerID="f4a85997302246bacff4774e7dabbc039ee9f5e10350d2603860ea80dc4d7e84" Dec 06 15:53:48 crc kubenswrapper[5003]: I1206 15:53:48.641777 5003 generic.go:334] "Generic (PLEG): container finished" podID="ef1241df-7f65-49b6-b681-e332717e0d88" containerID="9406822b4eabb53203268b24969408fe9fadbcac0c16c8f82eaf64e798133866" exitCode=1 Dec 06 15:53:48 crc kubenswrapper[5003]: I1206 15:53:48.641890 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystonefdf1-account-delete-mqw6h" event={"ID":"ef1241df-7f65-49b6-b681-e332717e0d88","Type":"ContainerDied","Data":"9406822b4eabb53203268b24969408fe9fadbcac0c16c8f82eaf64e798133866"} Dec 06 15:53:48 crc kubenswrapper[5003]: I1206 15:53:48.642655 5003 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="horizon-kuttl-tests/keystonefdf1-account-delete-mqw6h" secret="" err="secret \"galera-openstack-dockercfg-5h6p7\" not found" Dec 06 15:53:48 crc kubenswrapper[5003]: I1206 15:53:48.642691 5003 scope.go:117] "RemoveContainer" containerID="9406822b4eabb53203268b24969408fe9fadbcac0c16c8f82eaf64e798133866" Dec 06 15:53:48 crc kubenswrapper[5003]: E1206 15:53:48.643093 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-delete\" with CrashLoopBackOff: \"back-off 10s restarting failed container=mariadb-account-delete pod=keystonefdf1-account-delete-mqw6h_horizon-kuttl-tests(ef1241df-7f65-49b6-b681-e332717e0d88)\"" pod="horizon-kuttl-tests/keystonefdf1-account-delete-mqw6h" podUID="ef1241df-7f65-49b6-b681-e332717e0d88" Dec 06 15:53:48 crc kubenswrapper[5003]: I1206 15:53:48.647433 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-2" event={"ID":"6433e1ae-768c-42ba-b961-4bd7bfba8701","Type":"ContainerDied","Data":"4792a8bf79d19819d2211dc61c35f34531de96a9d81fae032f20d0cc22f219cd"} Dec 06 15:53:48 crc kubenswrapper[5003]: I1206 15:53:48.647516 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/openstack-galera-2" Dec 06 15:53:48 crc kubenswrapper[5003]: I1206 15:53:48.667432 5003 scope.go:117] "RemoveContainer" containerID="f4a85997302246bacff4774e7dabbc039ee9f5e10350d2603860ea80dc4d7e84" Dec 06 15:53:48 crc kubenswrapper[5003]: E1206 15:53:48.668163 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4a85997302246bacff4774e7dabbc039ee9f5e10350d2603860ea80dc4d7e84\": container with ID starting with f4a85997302246bacff4774e7dabbc039ee9f5e10350d2603860ea80dc4d7e84 not found: ID does not exist" containerID="f4a85997302246bacff4774e7dabbc039ee9f5e10350d2603860ea80dc4d7e84" Dec 06 15:53:48 crc kubenswrapper[5003]: I1206 15:53:48.668207 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4a85997302246bacff4774e7dabbc039ee9f5e10350d2603860ea80dc4d7e84"} err="failed to get container status \"f4a85997302246bacff4774e7dabbc039ee9f5e10350d2603860ea80dc4d7e84\": rpc error: code = NotFound desc = could not find container \"f4a85997302246bacff4774e7dabbc039ee9f5e10350d2603860ea80dc4d7e84\": container with ID starting with f4a85997302246bacff4774e7dabbc039ee9f5e10350d2603860ea80dc4d7e84 not found: ID does not exist" Dec 06 15:53:48 crc kubenswrapper[5003]: I1206 15:53:48.668238 5003 scope.go:117] "RemoveContainer" containerID="172229dd1f03f4155e8b527ca2eaf99a22620f316a2949aaba99f0f0d4a1e5a3" Dec 06 15:53:48 crc kubenswrapper[5003]: I1206 15:53:48.678505 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/memcached-0"] Dec 06 15:53:48 crc kubenswrapper[5003]: I1206 15:53:48.684186 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["horizon-kuttl-tests/memcached-0"] Dec 06 15:53:48 crc kubenswrapper[5003]: I1206 15:53:48.699949 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="horizon-kuttl-tests/rabbitmq-server-0" podUID="27124a81-a0ad-4bc6-ad89-d2f5738570bc" containerName="rabbitmq" containerID="cri-o://5b0f91f5eeff936fbe0fe160ee20c396ff099899ce29f5cdd4fa1f12d6ea3004" gracePeriod=604800 Dec 06 15:53:48 crc kubenswrapper[5003]: I1206 15:53:48.717006 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/69802b43-d4de-4ef5-9e10-9405562de3e7-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:48 crc kubenswrapper[5003]: I1206 15:53:48.717033 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d25w5\" (UniqueName: \"kubernetes.io/projected/69802b43-d4de-4ef5-9e10-9405562de3e7-kube-api-access-d25w5\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:48 crc kubenswrapper[5003]: I1206 15:53:48.717047 5003 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/69802b43-d4de-4ef5-9e10-9405562de3e7-kolla-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:48 crc kubenswrapper[5003]: I1206 15:53:48.720251 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/openstack-galera-2"] Dec 06 15:53:48 crc kubenswrapper[5003]: I1206 15:53:48.724619 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["horizon-kuttl-tests/openstack-galera-2"] Dec 06 15:53:48 crc kubenswrapper[5003]: I1206 15:53:48.893082 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5d45bd77f6-vsgls"] Dec 06 15:53:48 crc kubenswrapper[5003]: I1206 15:53:48.893292 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/horizon-operator-controller-manager-5d45bd77f6-vsgls" podUID="eada1e4a-eb19-4b1e-868d-31d913d7b85e" containerName="manager" containerID="cri-o://ddf6e0e2d9b57aabeef3242ccfcfb1a1c3ab1d2bb8eae8d911945e410c7e98ce" gracePeriod=10 Dec 06 15:53:49 crc kubenswrapper[5003]: I1206 15:53:49.089736 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-operators/horizon-operator-controller-manager-5d45bd77f6-vsgls" podUID="eada1e4a-eb19-4b1e-868d-31d913d7b85e" containerName="manager" probeResult="failure" output="Get \"http://10.217.0.84:8081/readyz\": dial tcp 10.217.0.84:8081: connect: connection refused" Dec 06 15:53:49 crc kubenswrapper[5003]: I1206 15:53:49.142107 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="horizon-kuttl-tests/keystone-bd6cbdc78-tjc97" podUID="1e075c35-aaca-468e-9276-0ce9bcb6a394" containerName="keystone-api" probeResult="failure" output="Get \"http://10.217.0.83:5000/v3\": read tcp 10.217.0.2:57864->10.217.0.83:5000: read: connection reset by peer" Dec 06 15:53:49 crc kubenswrapper[5003]: I1206 15:53:49.151251 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="horizon-kuttl-tests/openstack-galera-1" podUID="72422d98-0d90-4087-8aae-e78370f932b1" containerName="galera" containerID="cri-o://4bc3c2e758d8982d595f10a53008c877c956e5fc303fbcb4c027c58fa9a1e430" gracePeriod=28 Dec 06 15:53:49 crc kubenswrapper[5003]: I1206 15:53:49.187069 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/horizon-operator-index-mw2lw"] Dec 06 15:53:49 crc kubenswrapper[5003]: I1206 15:53:49.187266 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/horizon-operator-index-mw2lw" podUID="611b7f1b-1296-4dee-a189-7e38e1e1f0b9" containerName="registry-server" containerID="cri-o://ef5dd15fdf7318dcdd3307bc1ea72789ea6bddf6fa0765f2ce2c41cfd748a155" gracePeriod=30 Dec 06 15:53:49 crc kubenswrapper[5003]: E1206 15:53:49.223981 5003 configmap.go:193] Couldn't get configMap horizon-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Dec 06 15:53:49 crc kubenswrapper[5003]: E1206 15:53:49.224057 5003 nestedpendingoperations.go:348] Operation 
for "{volumeName:kubernetes.io/configmap/ef1241df-7f65-49b6-b681-e332717e0d88-operator-scripts podName:ef1241df-7f65-49b6-b681-e332717e0d88 nodeName:}" failed. No retries permitted until 2025-12-06 15:53:51.224039572 +0000 UTC m=+1309.757393953 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/ef1241df-7f65-49b6-b681-e332717e0d88-operator-scripts") pod "keystonefdf1-account-delete-mqw6h" (UID: "ef1241df-7f65-49b6-b681-e332717e0d88") : configmap "openstack-scripts" not found Dec 06 15:53:49 crc kubenswrapper[5003]: I1206 15:53:49.229676 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/ba5562828bf80d0aea4e250c924daf1f8fc1de13aae41ce98e1a26408344tts"] Dec 06 15:53:49 crc kubenswrapper[5003]: I1206 15:53:49.234586 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/ba5562828bf80d0aea4e250c924daf1f8fc1de13aae41ce98e1a26408344tts"] Dec 06 15:53:49 crc kubenswrapper[5003]: I1206 15:53:49.325908 5003 scope.go:117] "RemoveContainer" containerID="3be2e48606ac441812b02fb904997ab6234188ea0ee8feeb3fc904dacb84da5f" Dec 06 15:53:49 crc kubenswrapper[5003]: I1206 15:53:49.530647 5003 scope.go:117] "RemoveContainer" containerID="48f91b4bca2bb0bb84182a7178f812d724c21803359edfb6b556c9946bec902e" Dec 06 15:53:49 crc kubenswrapper[5003]: I1206 15:53:49.654752 5003 generic.go:334] "Generic (PLEG): container finished" podID="eada1e4a-eb19-4b1e-868d-31d913d7b85e" containerID="ddf6e0e2d9b57aabeef3242ccfcfb1a1c3ab1d2bb8eae8d911945e410c7e98ce" exitCode=0 Dec 06 15:53:49 crc kubenswrapper[5003]: I1206 15:53:49.654839 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-5d45bd77f6-vsgls" event={"ID":"eada1e4a-eb19-4b1e-868d-31d913d7b85e","Type":"ContainerDied","Data":"ddf6e0e2d9b57aabeef3242ccfcfb1a1c3ab1d2bb8eae8d911945e410c7e98ce"} Dec 06 15:53:49 crc kubenswrapper[5003]: I1206 15:53:49.657218 5003 generic.go:334] "Generic (PLEG): container finished" podID="611b7f1b-1296-4dee-a189-7e38e1e1f0b9" containerID="ef5dd15fdf7318dcdd3307bc1ea72789ea6bddf6fa0765f2ce2c41cfd748a155" exitCode=0 Dec 06 15:53:49 crc kubenswrapper[5003]: I1206 15:53:49.657281 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-index-mw2lw" event={"ID":"611b7f1b-1296-4dee-a189-7e38e1e1f0b9","Type":"ContainerDied","Data":"ef5dd15fdf7318dcdd3307bc1ea72789ea6bddf6fa0765f2ce2c41cfd748a155"} Dec 06 15:53:49 crc kubenswrapper[5003]: I1206 15:53:49.659748 5003 generic.go:334] "Generic (PLEG): container finished" podID="1e075c35-aaca-468e-9276-0ce9bcb6a394" containerID="a30cc65566f336c0c5a6b7bfe8ed6f2360be9b5ffd9ccbcd57ef0af0d7ef0b91" exitCode=0 Dec 06 15:53:49 crc kubenswrapper[5003]: I1206 15:53:49.659809 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-bd6cbdc78-tjc97" event={"ID":"1e075c35-aaca-468e-9276-0ce9bcb6a394","Type":"ContainerDied","Data":"a30cc65566f336c0c5a6b7bfe8ed6f2360be9b5ffd9ccbcd57ef0af0d7ef0b91"} Dec 06 15:53:49 crc kubenswrapper[5003]: I1206 15:53:49.661603 5003 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="horizon-kuttl-tests/keystonefdf1-account-delete-mqw6h" secret="" err="secret \"galera-openstack-dockercfg-5h6p7\" not found" Dec 06 15:53:49 crc kubenswrapper[5003]: I1206 15:53:49.661646 5003 scope.go:117] "RemoveContainer" containerID="9406822b4eabb53203268b24969408fe9fadbcac0c16c8f82eaf64e798133866" Dec 06 15:53:49 crc kubenswrapper[5003]: E1206 15:53:49.661872 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-delete\" with CrashLoopBackOff: \"back-off 10s restarting failed container=mariadb-account-delete pod=keystonefdf1-account-delete-mqw6h_horizon-kuttl-tests(ef1241df-7f65-49b6-b681-e332717e0d88)\"" pod="horizon-kuttl-tests/keystonefdf1-account-delete-mqw6h" podUID="ef1241df-7f65-49b6-b681-e332717e0d88" Dec 06 15:53:49 crc kubenswrapper[5003]: I1206 15:53:49.722710 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6433e1ae-768c-42ba-b961-4bd7bfba8701" path="/var/lib/kubelet/pods/6433e1ae-768c-42ba-b961-4bd7bfba8701/volumes" Dec 06 15:53:49 crc kubenswrapper[5003]: I1206 15:53:49.723477 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69802b43-d4de-4ef5-9e10-9405562de3e7" path="/var/lib/kubelet/pods/69802b43-d4de-4ef5-9e10-9405562de3e7/volumes" Dec 06 15:53:49 crc kubenswrapper[5003]: I1206 15:53:49.724288 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6" path="/var/lib/kubelet/pods/d73504d3-af7e-4ecf-a4a2-75d8b0aa8fa6/volumes" Dec 06 15:53:49 crc kubenswrapper[5003]: I1206 15:53:49.915727 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-5d45bd77f6-vsgls" Dec 06 15:53:49 crc kubenswrapper[5003]: I1206 15:53:49.933419 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/eada1e4a-eb19-4b1e-868d-31d913d7b85e-webhook-cert\") pod \"eada1e4a-eb19-4b1e-868d-31d913d7b85e\" (UID: \"eada1e4a-eb19-4b1e-868d-31d913d7b85e\") " Dec 06 15:53:49 crc kubenswrapper[5003]: I1206 15:53:49.933534 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8mxdl\" (UniqueName: \"kubernetes.io/projected/eada1e4a-eb19-4b1e-868d-31d913d7b85e-kube-api-access-8mxdl\") pod \"eada1e4a-eb19-4b1e-868d-31d913d7b85e\" (UID: \"eada1e4a-eb19-4b1e-868d-31d913d7b85e\") " Dec 06 15:53:49 crc kubenswrapper[5003]: I1206 15:53:49.933562 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/eada1e4a-eb19-4b1e-868d-31d913d7b85e-apiservice-cert\") pod \"eada1e4a-eb19-4b1e-868d-31d913d7b85e\" (UID: \"eada1e4a-eb19-4b1e-868d-31d913d7b85e\") " Dec 06 15:53:49 crc kubenswrapper[5003]: I1206 15:53:49.939083 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eada1e4a-eb19-4b1e-868d-31d913d7b85e-kube-api-access-8mxdl" (OuterVolumeSpecName: "kube-api-access-8mxdl") pod "eada1e4a-eb19-4b1e-868d-31d913d7b85e" (UID: "eada1e4a-eb19-4b1e-868d-31d913d7b85e"). InnerVolumeSpecName "kube-api-access-8mxdl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:53:49 crc kubenswrapper[5003]: I1206 15:53:49.941568 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eada1e4a-eb19-4b1e-868d-31d913d7b85e-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "eada1e4a-eb19-4b1e-868d-31d913d7b85e" (UID: "eada1e4a-eb19-4b1e-868d-31d913d7b85e"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:53:49 crc kubenswrapper[5003]: I1206 15:53:49.941599 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eada1e4a-eb19-4b1e-868d-31d913d7b85e-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "eada1e4a-eb19-4b1e-868d-31d913d7b85e" (UID: "eada1e4a-eb19-4b1e-868d-31d913d7b85e"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.009438 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/keystone-bd6cbdc78-tjc97" Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.034852 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1e075c35-aaca-468e-9276-0ce9bcb6a394-fernet-keys\") pod \"1e075c35-aaca-468e-9276-0ce9bcb6a394\" (UID: \"1e075c35-aaca-468e-9276-0ce9bcb6a394\") " Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.034930 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1e075c35-aaca-468e-9276-0ce9bcb6a394-credential-keys\") pod \"1e075c35-aaca-468e-9276-0ce9bcb6a394\" (UID: \"1e075c35-aaca-468e-9276-0ce9bcb6a394\") " Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.034968 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e075c35-aaca-468e-9276-0ce9bcb6a394-config-data\") pod \"1e075c35-aaca-468e-9276-0ce9bcb6a394\" (UID: \"1e075c35-aaca-468e-9276-0ce9bcb6a394\") " Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.035037 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1e075c35-aaca-468e-9276-0ce9bcb6a394-scripts\") pod \"1e075c35-aaca-468e-9276-0ce9bcb6a394\" (UID: \"1e075c35-aaca-468e-9276-0ce9bcb6a394\") " Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.035053 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tx6f2\" (UniqueName: \"kubernetes.io/projected/1e075c35-aaca-468e-9276-0ce9bcb6a394-kube-api-access-tx6f2\") pod \"1e075c35-aaca-468e-9276-0ce9bcb6a394\" (UID: \"1e075c35-aaca-468e-9276-0ce9bcb6a394\") " Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.035387 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8mxdl\" (UniqueName: \"kubernetes.io/projected/eada1e4a-eb19-4b1e-868d-31d913d7b85e-kube-api-access-8mxdl\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.035402 5003 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/eada1e4a-eb19-4b1e-868d-31d913d7b85e-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.035412 5003 reconciler_common.go:293] "Volume detached for volume 
\"webhook-cert\" (UniqueName: \"kubernetes.io/secret/eada1e4a-eb19-4b1e-868d-31d913d7b85e-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.038903 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e075c35-aaca-468e-9276-0ce9bcb6a394-scripts" (OuterVolumeSpecName: "scripts") pod "1e075c35-aaca-468e-9276-0ce9bcb6a394" (UID: "1e075c35-aaca-468e-9276-0ce9bcb6a394"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.039047 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e075c35-aaca-468e-9276-0ce9bcb6a394-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "1e075c35-aaca-468e-9276-0ce9bcb6a394" (UID: "1e075c35-aaca-468e-9276-0ce9bcb6a394"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.039416 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e075c35-aaca-468e-9276-0ce9bcb6a394-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "1e075c35-aaca-468e-9276-0ce9bcb6a394" (UID: "1e075c35-aaca-468e-9276-0ce9bcb6a394"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.039506 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e075c35-aaca-468e-9276-0ce9bcb6a394-kube-api-access-tx6f2" (OuterVolumeSpecName: "kube-api-access-tx6f2") pod "1e075c35-aaca-468e-9276-0ce9bcb6a394" (UID: "1e075c35-aaca-468e-9276-0ce9bcb6a394"). InnerVolumeSpecName "kube-api-access-tx6f2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.050806 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e075c35-aaca-468e-9276-0ce9bcb6a394-config-data" (OuterVolumeSpecName: "config-data") pod "1e075c35-aaca-468e-9276-0ce9bcb6a394" (UID: "1e075c35-aaca-468e-9276-0ce9bcb6a394"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.136709 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1e075c35-aaca-468e-9276-0ce9bcb6a394-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.136749 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tx6f2\" (UniqueName: \"kubernetes.io/projected/1e075c35-aaca-468e-9276-0ce9bcb6a394-kube-api-access-tx6f2\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.136763 5003 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1e075c35-aaca-468e-9276-0ce9bcb6a394-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.136777 5003 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1e075c35-aaca-468e-9276-0ce9bcb6a394-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.136789 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e075c35-aaca-468e-9276-0ce9bcb6a394-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.276004 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-index-mw2lw" Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.338214 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pw6qg\" (UniqueName: \"kubernetes.io/projected/611b7f1b-1296-4dee-a189-7e38e1e1f0b9-kube-api-access-pw6qg\") pod \"611b7f1b-1296-4dee-a189-7e38e1e1f0b9\" (UID: \"611b7f1b-1296-4dee-a189-7e38e1e1f0b9\") " Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.342092 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/611b7f1b-1296-4dee-a189-7e38e1e1f0b9-kube-api-access-pw6qg" (OuterVolumeSpecName: "kube-api-access-pw6qg") pod "611b7f1b-1296-4dee-a189-7e38e1e1f0b9" (UID: "611b7f1b-1296-4dee-a189-7e38e1e1f0b9"). InnerVolumeSpecName "kube-api-access-pw6qg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.440426 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pw6qg\" (UniqueName: \"kubernetes.io/projected/611b7f1b-1296-4dee-a189-7e38e1e1f0b9-kube-api-access-pw6qg\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.676189 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7fb9dff8cd-l4z2z"] Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.676452 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/keystone-operator-controller-manager-7fb9dff8cd-l4z2z" podUID="eb973b37-d488-4739-9c25-96885cc3158b" containerName="manager" containerID="cri-o://d4e5fe15f47d713486a4ea61a9d4517d28fc3ca1358eaf680cc145c4fa329d32" gracePeriod=10 Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.709485 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-bd6cbdc78-tjc97" event={"ID":"1e075c35-aaca-468e-9276-0ce9bcb6a394","Type":"ContainerDied","Data":"766e91bc451a7beb901ee88a94c9002cc32ed0b52fab48f3a5ee3c89a8fb5ac4"} Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.709552 5003 scope.go:117] "RemoveContainer" containerID="a30cc65566f336c0c5a6b7bfe8ed6f2360be9b5ffd9ccbcd57ef0af0d7ef0b91" Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.709573 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/keystone-bd6cbdc78-tjc97" Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.711884 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-5d45bd77f6-vsgls" event={"ID":"eada1e4a-eb19-4b1e-868d-31d913d7b85e","Type":"ContainerDied","Data":"265af085402c7fdc21be5171e44306de446435758f000bbae65c4a7118659b94"} Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.711962 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-5d45bd77f6-vsgls" Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.724914 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-index-mw2lw" event={"ID":"611b7f1b-1296-4dee-a189-7e38e1e1f0b9","Type":"ContainerDied","Data":"d93ffe1d145f0d2f18a16d75429ec17df29e121eabc7a74c5137b7aa55534bee"} Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.725005 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-index-mw2lw" Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.734869 5003 scope.go:117] "RemoveContainer" containerID="ddf6e0e2d9b57aabeef3242ccfcfb1a1c3ab1d2bb8eae8d911945e410c7e98ce" Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.735206 5003 generic.go:334] "Generic (PLEG): container finished" podID="27124a81-a0ad-4bc6-ad89-d2f5738570bc" containerID="5b0f91f5eeff936fbe0fe160ee20c396ff099899ce29f5cdd4fa1f12d6ea3004" exitCode=0 Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.735242 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/rabbitmq-server-0" event={"ID":"27124a81-a0ad-4bc6-ad89-d2f5738570bc","Type":"ContainerDied","Data":"5b0f91f5eeff936fbe0fe160ee20c396ff099899ce29f5cdd4fa1f12d6ea3004"} Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.750590 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5d45bd77f6-vsgls"] Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.754504 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5d45bd77f6-vsgls"] Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.766011 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/keystone-bd6cbdc78-tjc97"] Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.770019 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["horizon-kuttl-tests/keystone-bd6cbdc78-tjc97"] Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.770722 5003 scope.go:117] "RemoveContainer" containerID="ef5dd15fdf7318dcdd3307bc1ea72789ea6bddf6fa0765f2ce2c41cfd748a155" Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.780447 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/horizon-operator-index-mw2lw"] Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.785804 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/horizon-operator-index-mw2lw"] Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.890335 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-index-4qn9h"] Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.890570 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/keystone-operator-index-4qn9h" podUID="ff5afab4-f287-43ad-bf14-7ac8c90a52e3" containerName="registry-server" containerID="cri-o://42f4134bfb546e8795f6c7a557eac50c015cd43f87e79a13c6d099d6721f0418" gracePeriod=30 Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.938154 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/835551ba8f27f4fd61e1b05ebed5cb285496b645cbb6fd0ac403227c856dznq"] Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.944532 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/835551ba8f27f4fd61e1b05ebed5cb285496b645cbb6fd0ac403227c856dznq"] Dec 06 15:53:50 crc kubenswrapper[5003]: I1206 15:53:50.981744 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/rabbitmq-server-0" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.047818 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/27124a81-a0ad-4bc6-ad89-d2f5738570bc-rabbitmq-erlang-cookie\") pod \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\" (UID: \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\") " Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.047943 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/27124a81-a0ad-4bc6-ad89-d2f5738570bc-rabbitmq-confd\") pod \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\" (UID: \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\") " Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.047972 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/27124a81-a0ad-4bc6-ad89-d2f5738570bc-pod-info\") pod \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\" (UID: \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\") " Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.048008 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/27124a81-a0ad-4bc6-ad89-d2f5738570bc-erlang-cookie-secret\") pod \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\" (UID: \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\") " Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.048040 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vk5tk\" (UniqueName: \"kubernetes.io/projected/27124a81-a0ad-4bc6-ad89-d2f5738570bc-kube-api-access-vk5tk\") pod \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\" (UID: \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\") " Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.048074 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/27124a81-a0ad-4bc6-ad89-d2f5738570bc-rabbitmq-plugins\") pod \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\" (UID: \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\") " Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.048108 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/27124a81-a0ad-4bc6-ad89-d2f5738570bc-plugins-conf\") pod \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\" (UID: \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\") " Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.048346 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fc028333-0fe0-4309-89b9-caaa4c0882da\") pod \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\" (UID: \"27124a81-a0ad-4bc6-ad89-d2f5738570bc\") " Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.049322 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27124a81-a0ad-4bc6-ad89-d2f5738570bc-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "27124a81-a0ad-4bc6-ad89-d2f5738570bc" (UID: "27124a81-a0ad-4bc6-ad89-d2f5738570bc"). InnerVolumeSpecName "rabbitmq-erlang-cookie". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.049831 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/keystone-db-create-nkp64"] Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.050305 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27124a81-a0ad-4bc6-ad89-d2f5738570bc-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "27124a81-a0ad-4bc6-ad89-d2f5738570bc" (UID: "27124a81-a0ad-4bc6-ad89-d2f5738570bc"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.050429 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27124a81-a0ad-4bc6-ad89-d2f5738570bc-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "27124a81-a0ad-4bc6-ad89-d2f5738570bc" (UID: "27124a81-a0ad-4bc6-ad89-d2f5738570bc"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.054919 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27124a81-a0ad-4bc6-ad89-d2f5738570bc-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "27124a81-a0ad-4bc6-ad89-d2f5738570bc" (UID: "27124a81-a0ad-4bc6-ad89-d2f5738570bc"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.055152 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27124a81-a0ad-4bc6-ad89-d2f5738570bc-kube-api-access-vk5tk" (OuterVolumeSpecName: "kube-api-access-vk5tk") pod "27124a81-a0ad-4bc6-ad89-d2f5738570bc" (UID: "27124a81-a0ad-4bc6-ad89-d2f5738570bc"). InnerVolumeSpecName "kube-api-access-vk5tk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.057596 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/27124a81-a0ad-4bc6-ad89-d2f5738570bc-pod-info" (OuterVolumeSpecName: "pod-info") pod "27124a81-a0ad-4bc6-ad89-d2f5738570bc" (UID: "27124a81-a0ad-4bc6-ad89-d2f5738570bc"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.069346 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fc028333-0fe0-4309-89b9-caaa4c0882da" (OuterVolumeSpecName: "persistence") pod "27124a81-a0ad-4bc6-ad89-d2f5738570bc" (UID: "27124a81-a0ad-4bc6-ad89-d2f5738570bc"). InnerVolumeSpecName "pvc-fc028333-0fe0-4309-89b9-caaa4c0882da". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.084666 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="horizon-kuttl-tests/openstack-galera-0" podUID="6b45e98d-ae85-45e0-b565-11db9addcad3" containerName="galera" containerID="cri-o://133fc0f206a2ac359eedda713017b70e001923ecd02badee3cfce4e2f77bbc57" gracePeriod=26 Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.084845 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["horizon-kuttl-tests/keystone-db-create-nkp64"] Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.089795 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/keystonefdf1-account-delete-mqw6h"] Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.094391 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/keystone-fdf1-account-create-update-px2s8"] Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.099736 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["horizon-kuttl-tests/keystone-fdf1-account-create-update-px2s8"] Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.114901 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27124a81-a0ad-4bc6-ad89-d2f5738570bc-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "27124a81-a0ad-4bc6-ad89-d2f5738570bc" (UID: "27124a81-a0ad-4bc6-ad89-d2f5738570bc"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.150548 5003 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/27124a81-a0ad-4bc6-ad89-d2f5738570bc-pod-info\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.150591 5003 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/27124a81-a0ad-4bc6-ad89-d2f5738570bc-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.150605 5003 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/27124a81-a0ad-4bc6-ad89-d2f5738570bc-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.150618 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vk5tk\" (UniqueName: \"kubernetes.io/projected/27124a81-a0ad-4bc6-ad89-d2f5738570bc-kube-api-access-vk5tk\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.150629 5003 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/27124a81-a0ad-4bc6-ad89-d2f5738570bc-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.150640 5003 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/27124a81-a0ad-4bc6-ad89-d2f5738570bc-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.150672 5003 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-fc028333-0fe0-4309-89b9-caaa4c0882da\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fc028333-0fe0-4309-89b9-caaa4c0882da\") on node \"crc\" " Dec 06 15:53:51 crc kubenswrapper[5003]: 
I1206 15:53:51.150682 5003 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/27124a81-a0ad-4bc6-ad89-d2f5738570bc-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:51 crc kubenswrapper[5003]: E1206 15:53:51.275598 5003 configmap.go:193] Couldn't get configMap horizon-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Dec 06 15:53:51 crc kubenswrapper[5003]: E1206 15:53:51.275672 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ef1241df-7f65-49b6-b681-e332717e0d88-operator-scripts podName:ef1241df-7f65-49b6-b681-e332717e0d88 nodeName:}" failed. No retries permitted until 2025-12-06 15:53:55.275653839 +0000 UTC m=+1313.809008220 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/ef1241df-7f65-49b6-b681-e332717e0d88-operator-scripts") pod "keystonefdf1-account-delete-mqw6h" (UID: "ef1241df-7f65-49b6-b681-e332717e0d88") : configmap "openstack-scripts" not found Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.282689 5003 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.282921 5003 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-fc028333-0fe0-4309-89b9-caaa4c0882da" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fc028333-0fe0-4309-89b9-caaa4c0882da") on node "crc" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.376614 5003 reconciler_common.go:293] "Volume detached for volume \"pvc-fc028333-0fe0-4309-89b9-caaa4c0882da\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fc028333-0fe0-4309-89b9-caaa4c0882da\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.499056 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/keystonefdf1-account-delete-mqw6h" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.651039 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7fb9dff8cd-l4z2z" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.679020 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ef1241df-7f65-49b6-b681-e332717e0d88-operator-scripts\") pod \"ef1241df-7f65-49b6-b681-e332717e0d88\" (UID: \"ef1241df-7f65-49b6-b681-e332717e0d88\") " Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.679207 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4zqpc\" (UniqueName: \"kubernetes.io/projected/ef1241df-7f65-49b6-b681-e332717e0d88-kube-api-access-4zqpc\") pod \"ef1241df-7f65-49b6-b681-e332717e0d88\" (UID: \"ef1241df-7f65-49b6-b681-e332717e0d88\") " Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.680596 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef1241df-7f65-49b6-b681-e332717e0d88-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ef1241df-7f65-49b6-b681-e332717e0d88" (UID: "ef1241df-7f65-49b6-b681-e332717e0d88"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.685452 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef1241df-7f65-49b6-b681-e332717e0d88-kube-api-access-4zqpc" (OuterVolumeSpecName: "kube-api-access-4zqpc") pod "ef1241df-7f65-49b6-b681-e332717e0d88" (UID: "ef1241df-7f65-49b6-b681-e332717e0d88"). InnerVolumeSpecName "kube-api-access-4zqpc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.722212 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03fe4942-5f11-47e3-86ec-83f19111191a" path="/var/lib/kubelet/pods/03fe4942-5f11-47e3-86ec-83f19111191a/volumes" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.723003 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c1789cc-9c03-4473-a097-337f66aa38e9" path="/var/lib/kubelet/pods/1c1789cc-9c03-4473-a097-337f66aa38e9/volumes" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.723584 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e075c35-aaca-468e-9276-0ce9bcb6a394" path="/var/lib/kubelet/pods/1e075c35-aaca-468e-9276-0ce9bcb6a394/volumes" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.724670 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="611b7f1b-1296-4dee-a189-7e38e1e1f0b9" path="/var/lib/kubelet/pods/611b7f1b-1296-4dee-a189-7e38e1e1f0b9/volumes" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.725280 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab07907f-273d-4c4a-844d-74244e74ffe4" path="/var/lib/kubelet/pods/ab07907f-273d-4c4a-844d-74244e74ffe4/volumes" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.725845 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eada1e4a-eb19-4b1e-868d-31d913d7b85e" path="/var/lib/kubelet/pods/eada1e4a-eb19-4b1e-868d-31d913d7b85e/volumes" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.745832 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/rabbitmq-server-0" event={"ID":"27124a81-a0ad-4bc6-ad89-d2f5738570bc","Type":"ContainerDied","Data":"7bc373d08e7144f50ba119cac94dfd299766858f73f931d5488697052288770e"} Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.746028 5003 scope.go:117] "RemoveContainer" containerID="5b0f91f5eeff936fbe0fe160ee20c396ff099899ce29f5cdd4fa1f12d6ea3004" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.746265 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/rabbitmq-server-0" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.752041 5003 generic.go:334] "Generic (PLEG): container finished" podID="ff5afab4-f287-43ad-bf14-7ac8c90a52e3" containerID="42f4134bfb546e8795f6c7a557eac50c015cd43f87e79a13c6d099d6721f0418" exitCode=0 Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.752223 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-4qn9h" event={"ID":"ff5afab4-f287-43ad-bf14-7ac8c90a52e3","Type":"ContainerDied","Data":"42f4134bfb546e8795f6c7a557eac50c015cd43f87e79a13c6d099d6721f0418"} Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.756657 5003 generic.go:334] "Generic (PLEG): container finished" podID="72422d98-0d90-4087-8aae-e78370f932b1" containerID="4bc3c2e758d8982d595f10a53008c877c956e5fc303fbcb4c027c58fa9a1e430" exitCode=0 Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.756827 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-1" event={"ID":"72422d98-0d90-4087-8aae-e78370f932b1","Type":"ContainerDied","Data":"4bc3c2e758d8982d595f10a53008c877c956e5fc303fbcb4c027c58fa9a1e430"} Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.762105 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystonefdf1-account-delete-mqw6h" event={"ID":"ef1241df-7f65-49b6-b681-e332717e0d88","Type":"ContainerDied","Data":"56d9c42eab74970835ebbbc3f0cb310f5cdbe3ab0fbde24cc47a407908185485"} Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.762372 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/keystonefdf1-account-delete-mqw6h" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.770129 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/rabbitmq-server-0"] Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.771060 5003 scope.go:117] "RemoveContainer" containerID="6e16f162181f4e0cf41ba4628b056e20a6058930d1b6b01cd8815905709818b5" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.771622 5003 generic.go:334] "Generic (PLEG): container finished" podID="eb973b37-d488-4739-9c25-96885cc3158b" containerID="d4e5fe15f47d713486a4ea61a9d4517d28fc3ca1358eaf680cc145c4fa329d32" exitCode=0 Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.771657 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7fb9dff8cd-l4z2z" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.771673 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7fb9dff8cd-l4z2z" event={"ID":"eb973b37-d488-4739-9c25-96885cc3158b","Type":"ContainerDied","Data":"d4e5fe15f47d713486a4ea61a9d4517d28fc3ca1358eaf680cc145c4fa329d32"} Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.771702 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7fb9dff8cd-l4z2z" event={"ID":"eb973b37-d488-4739-9c25-96885cc3158b","Type":"ContainerDied","Data":"57989e3f5fd9397d579d20768ffd92e032a0245838e03851cb9fdd6f2406107f"} Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.779956 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/eb973b37-d488-4739-9c25-96885cc3158b-apiservice-cert\") pod \"eb973b37-d488-4739-9c25-96885cc3158b\" (UID: \"eb973b37-d488-4739-9c25-96885cc3158b\") " Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.780070 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kbz5d\" (UniqueName: \"kubernetes.io/projected/eb973b37-d488-4739-9c25-96885cc3158b-kube-api-access-kbz5d\") pod \"eb973b37-d488-4739-9c25-96885cc3158b\" (UID: \"eb973b37-d488-4739-9c25-96885cc3158b\") " Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.780090 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/eb973b37-d488-4739-9c25-96885cc3158b-webhook-cert\") pod \"eb973b37-d488-4739-9c25-96885cc3158b\" (UID: \"eb973b37-d488-4739-9c25-96885cc3158b\") " Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.780382 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4zqpc\" (UniqueName: \"kubernetes.io/projected/ef1241df-7f65-49b6-b681-e332717e0d88-kube-api-access-4zqpc\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.780400 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ef1241df-7f65-49b6-b681-e332717e0d88-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.784189 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["horizon-kuttl-tests/rabbitmq-server-0"] Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.784275 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb973b37-d488-4739-9c25-96885cc3158b-kube-api-access-kbz5d" (OuterVolumeSpecName: "kube-api-access-kbz5d") pod "eb973b37-d488-4739-9c25-96885cc3158b" (UID: "eb973b37-d488-4739-9c25-96885cc3158b"). InnerVolumeSpecName "kube-api-access-kbz5d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.784460 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb973b37-d488-4739-9c25-96885cc3158b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "eb973b37-d488-4739-9c25-96885cc3158b" (UID: "eb973b37-d488-4739-9c25-96885cc3158b"). InnerVolumeSpecName "apiservice-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.789897 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb973b37-d488-4739-9c25-96885cc3158b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "eb973b37-d488-4739-9c25-96885cc3158b" (UID: "eb973b37-d488-4739-9c25-96885cc3158b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.798760 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/keystonefdf1-account-delete-mqw6h"] Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.801554 5003 scope.go:117] "RemoveContainer" containerID="9406822b4eabb53203268b24969408fe9fadbcac0c16c8f82eaf64e798133866" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.803368 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["horizon-kuttl-tests/keystonefdf1-account-delete-mqw6h"] Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.818834 5003 scope.go:117] "RemoveContainer" containerID="d4e5fe15f47d713486a4ea61a9d4517d28fc3ca1358eaf680cc145c4fa329d32" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.833628 5003 scope.go:117] "RemoveContainer" containerID="d4e5fe15f47d713486a4ea61a9d4517d28fc3ca1358eaf680cc145c4fa329d32" Dec 06 15:53:51 crc kubenswrapper[5003]: E1206 15:53:51.834079 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4e5fe15f47d713486a4ea61a9d4517d28fc3ca1358eaf680cc145c4fa329d32\": container with ID starting with d4e5fe15f47d713486a4ea61a9d4517d28fc3ca1358eaf680cc145c4fa329d32 not found: ID does not exist" containerID="d4e5fe15f47d713486a4ea61a9d4517d28fc3ca1358eaf680cc145c4fa329d32" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.834219 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4e5fe15f47d713486a4ea61a9d4517d28fc3ca1358eaf680cc145c4fa329d32"} err="failed to get container status \"d4e5fe15f47d713486a4ea61a9d4517d28fc3ca1358eaf680cc145c4fa329d32\": rpc error: code = NotFound desc = could not find container \"d4e5fe15f47d713486a4ea61a9d4517d28fc3ca1358eaf680cc145c4fa329d32\": container with ID starting with d4e5fe15f47d713486a4ea61a9d4517d28fc3ca1358eaf680cc145c4fa329d32 not found: ID does not exist" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.896079 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kbz5d\" (UniqueName: \"kubernetes.io/projected/eb973b37-d488-4739-9c25-96885cc3158b-kube-api-access-kbz5d\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.896331 5003 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/eb973b37-d488-4739-9c25-96885cc3158b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:51 crc kubenswrapper[5003]: I1206 15:53:51.896420 5003 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/eb973b37-d488-4739-9c25-96885cc3158b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.102175 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7fb9dff8cd-l4z2z"] Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.106762 5003 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openstack-operators/keystone-operator-controller-manager-7fb9dff8cd-l4z2z"] Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.416992 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/openstack-galera-1" Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.465394 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-index-4qn9h" Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.510083 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72422d98-0d90-4087-8aae-e78370f932b1-operator-scripts\") pod \"72422d98-0d90-4087-8aae-e78370f932b1\" (UID: \"72422d98-0d90-4087-8aae-e78370f932b1\") " Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.510204 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2f6l6\" (UniqueName: \"kubernetes.io/projected/72422d98-0d90-4087-8aae-e78370f932b1-kube-api-access-2f6l6\") pod \"72422d98-0d90-4087-8aae-e78370f932b1\" (UID: \"72422d98-0d90-4087-8aae-e78370f932b1\") " Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.510298 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ktc25\" (UniqueName: \"kubernetes.io/projected/ff5afab4-f287-43ad-bf14-7ac8c90a52e3-kube-api-access-ktc25\") pod \"ff5afab4-f287-43ad-bf14-7ac8c90a52e3\" (UID: \"ff5afab4-f287-43ad-bf14-7ac8c90a52e3\") " Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.510389 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/72422d98-0d90-4087-8aae-e78370f932b1-kolla-config\") pod \"72422d98-0d90-4087-8aae-e78370f932b1\" (UID: \"72422d98-0d90-4087-8aae-e78370f932b1\") " Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.510416 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/72422d98-0d90-4087-8aae-e78370f932b1-config-data-default\") pod \"72422d98-0d90-4087-8aae-e78370f932b1\" (UID: \"72422d98-0d90-4087-8aae-e78370f932b1\") " Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.510479 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/72422d98-0d90-4087-8aae-e78370f932b1-config-data-generated\") pod \"72422d98-0d90-4087-8aae-e78370f932b1\" (UID: \"72422d98-0d90-4087-8aae-e78370f932b1\") " Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.510536 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"72422d98-0d90-4087-8aae-e78370f932b1\" (UID: \"72422d98-0d90-4087-8aae-e78370f932b1\") " Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.513266 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72422d98-0d90-4087-8aae-e78370f932b1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "72422d98-0d90-4087-8aae-e78370f932b1" (UID: "72422d98-0d90-4087-8aae-e78370f932b1"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.513557 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72422d98-0d90-4087-8aae-e78370f932b1-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "72422d98-0d90-4087-8aae-e78370f932b1" (UID: "72422d98-0d90-4087-8aae-e78370f932b1"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.513777 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72422d98-0d90-4087-8aae-e78370f932b1-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "72422d98-0d90-4087-8aae-e78370f932b1" (UID: "72422d98-0d90-4087-8aae-e78370f932b1"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.513867 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72422d98-0d90-4087-8aae-e78370f932b1-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "72422d98-0d90-4087-8aae-e78370f932b1" (UID: "72422d98-0d90-4087-8aae-e78370f932b1"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.517653 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72422d98-0d90-4087-8aae-e78370f932b1-kube-api-access-2f6l6" (OuterVolumeSpecName: "kube-api-access-2f6l6") pod "72422d98-0d90-4087-8aae-e78370f932b1" (UID: "72422d98-0d90-4087-8aae-e78370f932b1"). InnerVolumeSpecName "kube-api-access-2f6l6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.524136 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "mysql-db") pod "72422d98-0d90-4087-8aae-e78370f932b1" (UID: "72422d98-0d90-4087-8aae-e78370f932b1"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.531377 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff5afab4-f287-43ad-bf14-7ac8c90a52e3-kube-api-access-ktc25" (OuterVolumeSpecName: "kube-api-access-ktc25") pod "ff5afab4-f287-43ad-bf14-7ac8c90a52e3" (UID: "ff5afab4-f287-43ad-bf14-7ac8c90a52e3"). InnerVolumeSpecName "kube-api-access-ktc25". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.611897 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2f6l6\" (UniqueName: \"kubernetes.io/projected/72422d98-0d90-4087-8aae-e78370f932b1-kube-api-access-2f6l6\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.611931 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ktc25\" (UniqueName: \"kubernetes.io/projected/ff5afab4-f287-43ad-bf14-7ac8c90a52e3-kube-api-access-ktc25\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.611941 5003 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/72422d98-0d90-4087-8aae-e78370f932b1-kolla-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.611952 5003 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/72422d98-0d90-4087-8aae-e78370f932b1-config-data-default\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.611960 5003 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/72422d98-0d90-4087-8aae-e78370f932b1-config-data-generated\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.611995 5003 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.612004 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72422d98-0d90-4087-8aae-e78370f932b1-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.622274 5003 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.712867 5003 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.781789 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-4qn9h" event={"ID":"ff5afab4-f287-43ad-bf14-7ac8c90a52e3","Type":"ContainerDied","Data":"3891a655096b5666baa35b4f56280ad80389a779d56aaa840ffee84b29979573"} Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.781842 5003 scope.go:117] "RemoveContainer" containerID="42f4134bfb546e8795f6c7a557eac50c015cd43f87e79a13c6d099d6721f0418" Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.781957 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-index-4qn9h" Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.785672 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-1" event={"ID":"72422d98-0d90-4087-8aae-e78370f932b1","Type":"ContainerDied","Data":"8a09fa2ffad21ad150ebbe9f0910a228b7ffd3807a631b55e1adf579c5f15e5a"} Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.785918 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/openstack-galera-1" Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.792821 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-gbbml"] Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.793091 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-gbbml" podUID="b87b54c4-3c22-4d05-b500-14a1cc8e99bb" containerName="operator" containerID="cri-o://6e72a8f902d950826f857587b6917fc5c30e9e42ade348f050a329d8ae50dddb" gracePeriod=10 Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.977429 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-index-4qn9h"] Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.991297 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/keystone-operator-index-4qn9h"] Dec 06 15:53:52 crc kubenswrapper[5003]: I1206 15:53:52.995905 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/openstack-galera-1"] Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.002355 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["horizon-kuttl-tests/openstack-galera-1"] Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.005672 5003 scope.go:117] "RemoveContainer" containerID="4bc3c2e758d8982d595f10a53008c877c956e5fc303fbcb4c027c58fa9a1e430" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.070690 5003 scope.go:117] "RemoveContainer" containerID="41205e827227e20713cac2571c5fdf50dcc7dd7db3330f432ed8a4526fe4ba58" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.074800 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/openstack-galera-0" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.089060 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-cvt2f"] Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.089277 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/rabbitmq-cluster-operator-index-cvt2f" podUID="9527a213-4c9a-4477-9876-1b8572119c9a" containerName="registry-server" containerID="cri-o://2c08ff6901571d98aeae0c6c15cd5e31caab7c4427674a10ce65f40b92e74d62" gracePeriod=30 Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.120924 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590fzkbm"] Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.121315 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6b45e98d-ae85-45e0-b565-11db9addcad3-operator-scripts\") pod \"6b45e98d-ae85-45e0-b565-11db9addcad3\" (UID: \"6b45e98d-ae85-45e0-b565-11db9addcad3\") " Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.121396 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6b45e98d-ae85-45e0-b565-11db9addcad3-kolla-config\") pod \"6b45e98d-ae85-45e0-b565-11db9addcad3\" (UID: \"6b45e98d-ae85-45e0-b565-11db9addcad3\") " Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.121442 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bcl4k\" (UniqueName: \"kubernetes.io/projected/6b45e98d-ae85-45e0-b565-11db9addcad3-kube-api-access-bcl4k\") pod \"6b45e98d-ae85-45e0-b565-11db9addcad3\" (UID: \"6b45e98d-ae85-45e0-b565-11db9addcad3\") " Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.121480 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"6b45e98d-ae85-45e0-b565-11db9addcad3\" (UID: \"6b45e98d-ae85-45e0-b565-11db9addcad3\") " Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.121545 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6b45e98d-ae85-45e0-b565-11db9addcad3-config-data-default\") pod \"6b45e98d-ae85-45e0-b565-11db9addcad3\" (UID: \"6b45e98d-ae85-45e0-b565-11db9addcad3\") " Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.121582 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6b45e98d-ae85-45e0-b565-11db9addcad3-config-data-generated\") pod \"6b45e98d-ae85-45e0-b565-11db9addcad3\" (UID: \"6b45e98d-ae85-45e0-b565-11db9addcad3\") " Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.121891 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6b45e98d-ae85-45e0-b565-11db9addcad3-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "6b45e98d-ae85-45e0-b565-11db9addcad3" (UID: "6b45e98d-ae85-45e0-b565-11db9addcad3"). InnerVolumeSpecName "kolla-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.122213 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6b45e98d-ae85-45e0-b565-11db9addcad3-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "6b45e98d-ae85-45e0-b565-11db9addcad3" (UID: "6b45e98d-ae85-45e0-b565-11db9addcad3"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.122285 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b45e98d-ae85-45e0-b565-11db9addcad3-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "6b45e98d-ae85-45e0-b565-11db9addcad3" (UID: "6b45e98d-ae85-45e0-b565-11db9addcad3"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.122375 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6b45e98d-ae85-45e0-b565-11db9addcad3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6b45e98d-ae85-45e0-b565-11db9addcad3" (UID: "6b45e98d-ae85-45e0-b565-11db9addcad3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.125169 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590fzkbm"] Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.125670 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b45e98d-ae85-45e0-b565-11db9addcad3-kube-api-access-bcl4k" (OuterVolumeSpecName: "kube-api-access-bcl4k") pod "6b45e98d-ae85-45e0-b565-11db9addcad3" (UID: "6b45e98d-ae85-45e0-b565-11db9addcad3"). InnerVolumeSpecName "kube-api-access-bcl4k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.134075 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "mysql-db") pod "6b45e98d-ae85-45e0-b565-11db9addcad3" (UID: "6b45e98d-ae85-45e0-b565-11db9addcad3"). InnerVolumeSpecName "local-storage11-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.228619 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6b45e98d-ae85-45e0-b565-11db9addcad3-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.228660 5003 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6b45e98d-ae85-45e0-b565-11db9addcad3-kolla-config\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.228675 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bcl4k\" (UniqueName: \"kubernetes.io/projected/6b45e98d-ae85-45e0-b565-11db9addcad3-kube-api-access-bcl4k\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.228720 5003 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.228733 5003 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6b45e98d-ae85-45e0-b565-11db9addcad3-config-data-default\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.228746 5003 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6b45e98d-ae85-45e0-b565-11db9addcad3-config-data-generated\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.240684 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-gbbml" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.247418 5003 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.329392 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxdvq\" (UniqueName: \"kubernetes.io/projected/b87b54c4-3c22-4d05-b500-14a1cc8e99bb-kube-api-access-zxdvq\") pod \"b87b54c4-3c22-4d05-b500-14a1cc8e99bb\" (UID: \"b87b54c4-3c22-4d05-b500-14a1cc8e99bb\") " Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.329814 5003 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.374659 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b87b54c4-3c22-4d05-b500-14a1cc8e99bb-kube-api-access-zxdvq" (OuterVolumeSpecName: "kube-api-access-zxdvq") pod "b87b54c4-3c22-4d05-b500-14a1cc8e99bb" (UID: "b87b54c4-3c22-4d05-b500-14a1cc8e99bb"). InnerVolumeSpecName "kube-api-access-zxdvq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.431014 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxdvq\" (UniqueName: \"kubernetes.io/projected/b87b54c4-3c22-4d05-b500-14a1cc8e99bb-kube-api-access-zxdvq\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.522860 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-cvt2f" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.635370 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tz4b2\" (UniqueName: \"kubernetes.io/projected/9527a213-4c9a-4477-9876-1b8572119c9a-kube-api-access-tz4b2\") pod \"9527a213-4c9a-4477-9876-1b8572119c9a\" (UID: \"9527a213-4c9a-4477-9876-1b8572119c9a\") " Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.639231 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9527a213-4c9a-4477-9876-1b8572119c9a-kube-api-access-tz4b2" (OuterVolumeSpecName: "kube-api-access-tz4b2") pod "9527a213-4c9a-4477-9876-1b8572119c9a" (UID: "9527a213-4c9a-4477-9876-1b8572119c9a"). InnerVolumeSpecName "kube-api-access-tz4b2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.718906 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20390cae-6054-4766-8ff4-48402fd00916" path="/var/lib/kubelet/pods/20390cae-6054-4766-8ff4-48402fd00916/volumes" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.719738 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27124a81-a0ad-4bc6-ad89-d2f5738570bc" path="/var/lib/kubelet/pods/27124a81-a0ad-4bc6-ad89-d2f5738570bc/volumes" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.720385 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72422d98-0d90-4087-8aae-e78370f932b1" path="/var/lib/kubelet/pods/72422d98-0d90-4087-8aae-e78370f932b1/volumes" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.720999 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb973b37-d488-4739-9c25-96885cc3158b" path="/var/lib/kubelet/pods/eb973b37-d488-4739-9c25-96885cc3158b/volumes" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.721514 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef1241df-7f65-49b6-b681-e332717e0d88" path="/var/lib/kubelet/pods/ef1241df-7f65-49b6-b681-e332717e0d88/volumes" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.721931 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff5afab4-f287-43ad-bf14-7ac8c90a52e3" path="/var/lib/kubelet/pods/ff5afab4-f287-43ad-bf14-7ac8c90a52e3/volumes" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.736871 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tz4b2\" (UniqueName: \"kubernetes.io/projected/9527a213-4c9a-4477-9876-1b8572119c9a-kube-api-access-tz4b2\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.796018 5003 generic.go:334] "Generic (PLEG): container finished" podID="b87b54c4-3c22-4d05-b500-14a1cc8e99bb" containerID="6e72a8f902d950826f857587b6917fc5c30e9e42ade348f050a329d8ae50dddb" exitCode=0 Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.796059 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-gbbml" event={"ID":"b87b54c4-3c22-4d05-b500-14a1cc8e99bb","Type":"ContainerDied","Data":"6e72a8f902d950826f857587b6917fc5c30e9e42ade348f050a329d8ae50dddb"} Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.796089 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-gbbml" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.796113 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-gbbml" event={"ID":"b87b54c4-3c22-4d05-b500-14a1cc8e99bb","Type":"ContainerDied","Data":"1c27f2304d21d1006d83c9089945b2d5a09602f99bfa434e1f43cbaafe25a696"} Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.796138 5003 scope.go:117] "RemoveContainer" containerID="6e72a8f902d950826f857587b6917fc5c30e9e42ade348f050a329d8ae50dddb" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.798847 5003 generic.go:334] "Generic (PLEG): container finished" podID="6b45e98d-ae85-45e0-b565-11db9addcad3" containerID="133fc0f206a2ac359eedda713017b70e001923ecd02badee3cfce4e2f77bbc57" exitCode=0 Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.799017 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-0" event={"ID":"6b45e98d-ae85-45e0-b565-11db9addcad3","Type":"ContainerDied","Data":"133fc0f206a2ac359eedda713017b70e001923ecd02badee3cfce4e2f77bbc57"} Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.799286 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-0" event={"ID":"6b45e98d-ae85-45e0-b565-11db9addcad3","Type":"ContainerDied","Data":"0dcc429a9bf15f3ced249fd46aee662a76d8c5492810dc6c36dbd9a491f163c6"} Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.799102 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/openstack-galera-0" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.803577 5003 generic.go:334] "Generic (PLEG): container finished" podID="9527a213-4c9a-4477-9876-1b8572119c9a" containerID="2c08ff6901571d98aeae0c6c15cd5e31caab7c4427674a10ce65f40b92e74d62" exitCode=0 Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.803648 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-cvt2f" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.803724 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-cvt2f" event={"ID":"9527a213-4c9a-4477-9876-1b8572119c9a","Type":"ContainerDied","Data":"2c08ff6901571d98aeae0c6c15cd5e31caab7c4427674a10ce65f40b92e74d62"} Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.803790 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-cvt2f" event={"ID":"9527a213-4c9a-4477-9876-1b8572119c9a","Type":"ContainerDied","Data":"1d1de4f3c810c96c4ed6379f49983f08a077bcd1fe0f7986e0a481ca023bbbfe"} Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.818372 5003 scope.go:117] "RemoveContainer" containerID="6e72a8f902d950826f857587b6917fc5c30e9e42ade348f050a329d8ae50dddb" Dec 06 15:53:53 crc kubenswrapper[5003]: E1206 15:53:53.819158 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e72a8f902d950826f857587b6917fc5c30e9e42ade348f050a329d8ae50dddb\": container with ID starting with 6e72a8f902d950826f857587b6917fc5c30e9e42ade348f050a329d8ae50dddb not found: ID does not exist" containerID="6e72a8f902d950826f857587b6917fc5c30e9e42ade348f050a329d8ae50dddb" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.819278 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e72a8f902d950826f857587b6917fc5c30e9e42ade348f050a329d8ae50dddb"} err="failed to get container status \"6e72a8f902d950826f857587b6917fc5c30e9e42ade348f050a329d8ae50dddb\": rpc error: code = NotFound desc = could not find container \"6e72a8f902d950826f857587b6917fc5c30e9e42ade348f050a329d8ae50dddb\": container with ID starting with 6e72a8f902d950826f857587b6917fc5c30e9e42ade348f050a329d8ae50dddb not found: ID does not exist" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.819326 5003 scope.go:117] "RemoveContainer" containerID="133fc0f206a2ac359eedda713017b70e001923ecd02badee3cfce4e2f77bbc57" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.821791 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-gbbml"] Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.825899 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-gbbml"] Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.845626 5003 scope.go:117] "RemoveContainer" containerID="c5094b2f8afe0c8541ddfda3379dcc608d2a6e63c745cb7108302879122eecdf" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.846555 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/openstack-galera-0"] Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.864720 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["horizon-kuttl-tests/openstack-galera-0"] Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.871852 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-cvt2f"] Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.885522 5003 scope.go:117] "RemoveContainer" containerID="133fc0f206a2ac359eedda713017b70e001923ecd02badee3cfce4e2f77bbc57" Dec 06 15:53:53 crc kubenswrapper[5003]: E1206 15:53:53.886060 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: 
code = NotFound desc = could not find container \"133fc0f206a2ac359eedda713017b70e001923ecd02badee3cfce4e2f77bbc57\": container with ID starting with 133fc0f206a2ac359eedda713017b70e001923ecd02badee3cfce4e2f77bbc57 not found: ID does not exist" containerID="133fc0f206a2ac359eedda713017b70e001923ecd02badee3cfce4e2f77bbc57" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.886129 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"133fc0f206a2ac359eedda713017b70e001923ecd02badee3cfce4e2f77bbc57"} err="failed to get container status \"133fc0f206a2ac359eedda713017b70e001923ecd02badee3cfce4e2f77bbc57\": rpc error: code = NotFound desc = could not find container \"133fc0f206a2ac359eedda713017b70e001923ecd02badee3cfce4e2f77bbc57\": container with ID starting with 133fc0f206a2ac359eedda713017b70e001923ecd02badee3cfce4e2f77bbc57 not found: ID does not exist" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.886186 5003 scope.go:117] "RemoveContainer" containerID="c5094b2f8afe0c8541ddfda3379dcc608d2a6e63c745cb7108302879122eecdf" Dec 06 15:53:53 crc kubenswrapper[5003]: E1206 15:53:53.886619 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5094b2f8afe0c8541ddfda3379dcc608d2a6e63c745cb7108302879122eecdf\": container with ID starting with c5094b2f8afe0c8541ddfda3379dcc608d2a6e63c745cb7108302879122eecdf not found: ID does not exist" containerID="c5094b2f8afe0c8541ddfda3379dcc608d2a6e63c745cb7108302879122eecdf" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.886641 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5094b2f8afe0c8541ddfda3379dcc608d2a6e63c745cb7108302879122eecdf"} err="failed to get container status \"c5094b2f8afe0c8541ddfda3379dcc608d2a6e63c745cb7108302879122eecdf\": rpc error: code = NotFound desc = could not find container \"c5094b2f8afe0c8541ddfda3379dcc608d2a6e63c745cb7108302879122eecdf\": container with ID starting with c5094b2f8afe0c8541ddfda3379dcc608d2a6e63c745cb7108302879122eecdf not found: ID does not exist" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.886658 5003 scope.go:117] "RemoveContainer" containerID="2c08ff6901571d98aeae0c6c15cd5e31caab7c4427674a10ce65f40b92e74d62" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.892401 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-cvt2f"] Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.900937 5003 scope.go:117] "RemoveContainer" containerID="2c08ff6901571d98aeae0c6c15cd5e31caab7c4427674a10ce65f40b92e74d62" Dec 06 15:53:53 crc kubenswrapper[5003]: E1206 15:53:53.901400 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c08ff6901571d98aeae0c6c15cd5e31caab7c4427674a10ce65f40b92e74d62\": container with ID starting with 2c08ff6901571d98aeae0c6c15cd5e31caab7c4427674a10ce65f40b92e74d62 not found: ID does not exist" containerID="2c08ff6901571d98aeae0c6c15cd5e31caab7c4427674a10ce65f40b92e74d62" Dec 06 15:53:53 crc kubenswrapper[5003]: I1206 15:53:53.901448 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c08ff6901571d98aeae0c6c15cd5e31caab7c4427674a10ce65f40b92e74d62"} err="failed to get container status \"2c08ff6901571d98aeae0c6c15cd5e31caab7c4427674a10ce65f40b92e74d62\": rpc error: code = NotFound desc = could not find 
container \"2c08ff6901571d98aeae0c6c15cd5e31caab7c4427674a10ce65f40b92e74d62\": container with ID starting with 2c08ff6901571d98aeae0c6c15cd5e31caab7c4427674a10ce65f40b92e74d62 not found: ID does not exist" Dec 06 15:53:55 crc kubenswrapper[5003]: I1206 15:53:55.721785 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b45e98d-ae85-45e0-b565-11db9addcad3" path="/var/lib/kubelet/pods/6b45e98d-ae85-45e0-b565-11db9addcad3/volumes" Dec 06 15:53:55 crc kubenswrapper[5003]: I1206 15:53:55.722529 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9527a213-4c9a-4477-9876-1b8572119c9a" path="/var/lib/kubelet/pods/9527a213-4c9a-4477-9876-1b8572119c9a/volumes" Dec 06 15:53:55 crc kubenswrapper[5003]: I1206 15:53:55.722937 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b87b54c4-3c22-4d05-b500-14a1cc8e99bb" path="/var/lib/kubelet/pods/b87b54c4-3c22-4d05-b500-14a1cc8e99bb/volumes" Dec 06 15:53:57 crc kubenswrapper[5003]: I1206 15:53:57.225976 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-controller-manager-5b58584fcf-h8rqq"] Dec 06 15:53:57 crc kubenswrapper[5003]: I1206 15:53:57.226502 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/infra-operator-controller-manager-5b58584fcf-h8rqq" podUID="0f4eeada-366f-4403-bd3e-54235105ef11" containerName="manager" containerID="cri-o://cdbc11c195fd2646a7c7283174463f1daa94d7c57c9b053c49590c8ed7770c9f" gracePeriod=10 Dec 06 15:53:57 crc kubenswrapper[5003]: I1206 15:53:57.440769 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-index-2ht4h"] Dec 06 15:53:57 crc kubenswrapper[5003]: I1206 15:53:57.441228 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/infra-operator-index-2ht4h" podUID="aedbe2bf-a5c7-42dc-80b8-2e6fe7ee9c6c" containerName="registry-server" containerID="cri-o://caeb0bee3885eb2551e9ac668861d8d908688d44cd4b75a62dc38ab441e0633c" gracePeriod=30 Dec 06 15:53:57 crc kubenswrapper[5003]: I1206 15:53:57.472253 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/7c8268737cab4bb480b8c6360540a07b7a71682e502fa0b9fb67d6a17a98tzf"] Dec 06 15:53:57 crc kubenswrapper[5003]: I1206 15:53:57.475819 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/7c8268737cab4bb480b8c6360540a07b7a71682e502fa0b9fb67d6a17a98tzf"] Dec 06 15:53:57 crc kubenswrapper[5003]: I1206 15:53:57.730593 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d0165ce-9e29-4ace-8efe-4b89e00318f6" path="/var/lib/kubelet/pods/1d0165ce-9e29-4ace-8efe-4b89e00318f6/volumes" Dec 06 15:53:57 crc kubenswrapper[5003]: I1206 15:53:57.855749 5003 generic.go:334] "Generic (PLEG): container finished" podID="aedbe2bf-a5c7-42dc-80b8-2e6fe7ee9c6c" containerID="caeb0bee3885eb2551e9ac668861d8d908688d44cd4b75a62dc38ab441e0633c" exitCode=0 Dec 06 15:53:57 crc kubenswrapper[5003]: I1206 15:53:57.855806 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-2ht4h" event={"ID":"aedbe2bf-a5c7-42dc-80b8-2e6fe7ee9c6c","Type":"ContainerDied","Data":"caeb0bee3885eb2551e9ac668861d8d908688d44cd4b75a62dc38ab441e0633c"} Dec 06 15:53:57 crc kubenswrapper[5003]: I1206 15:53:57.857137 5003 generic.go:334] "Generic (PLEG): container finished" podID="0f4eeada-366f-4403-bd3e-54235105ef11" 
containerID="cdbc11c195fd2646a7c7283174463f1daa94d7c57c9b053c49590c8ed7770c9f" exitCode=0 Dec 06 15:53:57 crc kubenswrapper[5003]: I1206 15:53:57.857162 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-5b58584fcf-h8rqq" event={"ID":"0f4eeada-366f-4403-bd3e-54235105ef11","Type":"ContainerDied","Data":"cdbc11c195fd2646a7c7283174463f1daa94d7c57c9b053c49590c8ed7770c9f"} Dec 06 15:53:57 crc kubenswrapper[5003]: I1206 15:53:57.992703 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-index-2ht4h" Dec 06 15:53:58 crc kubenswrapper[5003]: I1206 15:53:58.063635 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l4vp8\" (UniqueName: \"kubernetes.io/projected/aedbe2bf-a5c7-42dc-80b8-2e6fe7ee9c6c-kube-api-access-l4vp8\") pod \"aedbe2bf-a5c7-42dc-80b8-2e6fe7ee9c6c\" (UID: \"aedbe2bf-a5c7-42dc-80b8-2e6fe7ee9c6c\") " Dec 06 15:53:58 crc kubenswrapper[5003]: I1206 15:53:58.069947 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aedbe2bf-a5c7-42dc-80b8-2e6fe7ee9c6c-kube-api-access-l4vp8" (OuterVolumeSpecName: "kube-api-access-l4vp8") pod "aedbe2bf-a5c7-42dc-80b8-2e6fe7ee9c6c" (UID: "aedbe2bf-a5c7-42dc-80b8-2e6fe7ee9c6c"). InnerVolumeSpecName "kube-api-access-l4vp8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:53:58 crc kubenswrapper[5003]: I1206 15:53:58.165555 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l4vp8\" (UniqueName: \"kubernetes.io/projected/aedbe2bf-a5c7-42dc-80b8-2e6fe7ee9c6c-kube-api-access-l4vp8\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:58 crc kubenswrapper[5003]: I1206 15:53:58.649549 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-6fd8f69c54-5f56z"] Dec 06 15:53:58 crc kubenswrapper[5003]: I1206 15:53:58.649793 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/mariadb-operator-controller-manager-6fd8f69c54-5f56z" podUID="73f33e10-66ff-41e9-97a3-e8cd2db5e39e" containerName="manager" containerID="cri-o://be04d6be433878f1997040b2c461c22a91ecd811282e70b7ae1e7fb706d37f64" gracePeriod=10 Dec 06 15:53:58 crc kubenswrapper[5003]: E1206 15:53:58.787730 5003 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod73f33e10_66ff_41e9_97a3_e8cd2db5e39e.slice/crio-be04d6be433878f1997040b2c461c22a91ecd811282e70b7ae1e7fb706d37f64.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod73f33e10_66ff_41e9_97a3_e8cd2db5e39e.slice/crio-conmon-be04d6be433878f1997040b2c461c22a91ecd811282e70b7ae1e7fb706d37f64.scope\": RecentStats: unable to find data in memory cache]" Dec 06 15:53:58 crc kubenswrapper[5003]: I1206 15:53:58.938404 5003 generic.go:334] "Generic (PLEG): container finished" podID="73f33e10-66ff-41e9-97a3-e8cd2db5e39e" containerID="be04d6be433878f1997040b2c461c22a91ecd811282e70b7ae1e7fb706d37f64" exitCode=0 Dec 06 15:53:58 crc kubenswrapper[5003]: I1206 15:53:58.938737 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-6fd8f69c54-5f56z" 
event={"ID":"73f33e10-66ff-41e9-97a3-e8cd2db5e39e","Type":"ContainerDied","Data":"be04d6be433878f1997040b2c461c22a91ecd811282e70b7ae1e7fb706d37f64"} Dec 06 15:53:58 crc kubenswrapper[5003]: I1206 15:53:58.951646 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-2ht4h" event={"ID":"aedbe2bf-a5c7-42dc-80b8-2e6fe7ee9c6c","Type":"ContainerDied","Data":"7380844aedd250e7265da4f3cdabc49e9495a0c6add87b61790bbb34b0c25938"} Dec 06 15:53:58 crc kubenswrapper[5003]: I1206 15:53:58.951708 5003 scope.go:117] "RemoveContainer" containerID="caeb0bee3885eb2551e9ac668861d8d908688d44cd4b75a62dc38ab441e0633c" Dec 06 15:53:58 crc kubenswrapper[5003]: I1206 15:53:58.951927 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-index-2ht4h" Dec 06 15:53:58 crc kubenswrapper[5003]: I1206 15:53:58.976978 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-42pp6"] Dec 06 15:53:58 crc kubenswrapper[5003]: I1206 15:53:58.977234 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/mariadb-operator-index-42pp6" podUID="815d4582-e47b-4d39-9c18-9886ba2a8e7d" containerName="registry-server" containerID="cri-o://7e81de4adb97b13184a2f682fe0edc64fcb44c061e4cb9cab407c46b36a311a9" gracePeriod=30 Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.005094 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/e9e5855e5cd3b19ff946d1a783ccd6861442182df01f7778e40ab7fce76mmm5"] Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.009007 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/e9e5855e5cd3b19ff946d1a783ccd6861442182df01f7778e40ab7fce76mmm5"] Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.012331 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-5b58584fcf-h8rqq" Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.059627 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-index-2ht4h"] Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.078748 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/infra-operator-index-2ht4h"] Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.079146 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9d4xl\" (UniqueName: \"kubernetes.io/projected/0f4eeada-366f-4403-bd3e-54235105ef11-kube-api-access-9d4xl\") pod \"0f4eeada-366f-4403-bd3e-54235105ef11\" (UID: \"0f4eeada-366f-4403-bd3e-54235105ef11\") " Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.079256 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0f4eeada-366f-4403-bd3e-54235105ef11-apiservice-cert\") pod \"0f4eeada-366f-4403-bd3e-54235105ef11\" (UID: \"0f4eeada-366f-4403-bd3e-54235105ef11\") " Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.079307 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0f4eeada-366f-4403-bd3e-54235105ef11-webhook-cert\") pod \"0f4eeada-366f-4403-bd3e-54235105ef11\" (UID: \"0f4eeada-366f-4403-bd3e-54235105ef11\") " Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.083691 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f4eeada-366f-4403-bd3e-54235105ef11-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "0f4eeada-366f-4403-bd3e-54235105ef11" (UID: "0f4eeada-366f-4403-bd3e-54235105ef11"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.084522 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f4eeada-366f-4403-bd3e-54235105ef11-kube-api-access-9d4xl" (OuterVolumeSpecName: "kube-api-access-9d4xl") pod "0f4eeada-366f-4403-bd3e-54235105ef11" (UID: "0f4eeada-366f-4403-bd3e-54235105ef11"). InnerVolumeSpecName "kube-api-access-9d4xl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.100993 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f4eeada-366f-4403-bd3e-54235105ef11-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "0f4eeada-366f-4403-bd3e-54235105ef11" (UID: "0f4eeada-366f-4403-bd3e-54235105ef11"). InnerVolumeSpecName "webhook-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.181893 5003 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0f4eeada-366f-4403-bd3e-54235105ef11-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.181982 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9d4xl\" (UniqueName: \"kubernetes.io/projected/0f4eeada-366f-4403-bd3e-54235105ef11-kube-api-access-9d4xl\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.181998 5003 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0f4eeada-366f-4403-bd3e-54235105ef11-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.228908 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-6fd8f69c54-5f56z" Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.282905 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/73f33e10-66ff-41e9-97a3-e8cd2db5e39e-apiservice-cert\") pod \"73f33e10-66ff-41e9-97a3-e8cd2db5e39e\" (UID: \"73f33e10-66ff-41e9-97a3-e8cd2db5e39e\") " Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.283053 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/73f33e10-66ff-41e9-97a3-e8cd2db5e39e-webhook-cert\") pod \"73f33e10-66ff-41e9-97a3-e8cd2db5e39e\" (UID: \"73f33e10-66ff-41e9-97a3-e8cd2db5e39e\") " Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.283089 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5458q\" (UniqueName: \"kubernetes.io/projected/73f33e10-66ff-41e9-97a3-e8cd2db5e39e-kube-api-access-5458q\") pod \"73f33e10-66ff-41e9-97a3-e8cd2db5e39e\" (UID: \"73f33e10-66ff-41e9-97a3-e8cd2db5e39e\") " Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.287753 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73f33e10-66ff-41e9-97a3-e8cd2db5e39e-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "73f33e10-66ff-41e9-97a3-e8cd2db5e39e" (UID: "73f33e10-66ff-41e9-97a3-e8cd2db5e39e"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.287906 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73f33e10-66ff-41e9-97a3-e8cd2db5e39e-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "73f33e10-66ff-41e9-97a3-e8cd2db5e39e" (UID: "73f33e10-66ff-41e9-97a3-e8cd2db5e39e"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.292363 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73f33e10-66ff-41e9-97a3-e8cd2db5e39e-kube-api-access-5458q" (OuterVolumeSpecName: "kube-api-access-5458q") pod "73f33e10-66ff-41e9-97a3-e8cd2db5e39e" (UID: "73f33e10-66ff-41e9-97a3-e8cd2db5e39e"). InnerVolumeSpecName "kube-api-access-5458q". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.384711 5003 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/73f33e10-66ff-41e9-97a3-e8cd2db5e39e-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.385062 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5458q\" (UniqueName: \"kubernetes.io/projected/73f33e10-66ff-41e9-97a3-e8cd2db5e39e-kube-api-access-5458q\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.385076 5003 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/73f33e10-66ff-41e9-97a3-e8cd2db5e39e-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.390328 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-42pp6" Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.486398 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8xnjh\" (UniqueName: \"kubernetes.io/projected/815d4582-e47b-4d39-9c18-9886ba2a8e7d-kube-api-access-8xnjh\") pod \"815d4582-e47b-4d39-9c18-9886ba2a8e7d\" (UID: \"815d4582-e47b-4d39-9c18-9886ba2a8e7d\") " Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.489726 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/815d4582-e47b-4d39-9c18-9886ba2a8e7d-kube-api-access-8xnjh" (OuterVolumeSpecName: "kube-api-access-8xnjh") pod "815d4582-e47b-4d39-9c18-9886ba2a8e7d" (UID: "815d4582-e47b-4d39-9c18-9886ba2a8e7d"). InnerVolumeSpecName "kube-api-access-8xnjh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.588561 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8xnjh\" (UniqueName: \"kubernetes.io/projected/815d4582-e47b-4d39-9c18-9886ba2a8e7d-kube-api-access-8xnjh\") on node \"crc\" DevicePath \"\"" Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.719821 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6711a352-d8f2-4191-b0a5-f4c68b72d443" path="/var/lib/kubelet/pods/6711a352-d8f2-4191-b0a5-f4c68b72d443/volumes" Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.720546 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aedbe2bf-a5c7-42dc-80b8-2e6fe7ee9c6c" path="/var/lib/kubelet/pods/aedbe2bf-a5c7-42dc-80b8-2e6fe7ee9c6c/volumes" Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.959994 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-6fd8f69c54-5f56z" Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.960017 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-6fd8f69c54-5f56z" event={"ID":"73f33e10-66ff-41e9-97a3-e8cd2db5e39e","Type":"ContainerDied","Data":"eae07f18256d7c09707b55224700c6c97b5b011a21e4440d8470aa2c7f299302"} Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.960105 5003 scope.go:117] "RemoveContainer" containerID="be04d6be433878f1997040b2c461c22a91ecd811282e70b7ae1e7fb706d37f64" Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.961894 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-5b58584fcf-h8rqq" event={"ID":"0f4eeada-366f-4403-bd3e-54235105ef11","Type":"ContainerDied","Data":"e811cbf39e07661ffafd77652084a7946bc5f11405ba820b65723942331e3cd5"} Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.961984 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-5b58584fcf-h8rqq" Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.964460 5003 generic.go:334] "Generic (PLEG): container finished" podID="815d4582-e47b-4d39-9c18-9886ba2a8e7d" containerID="7e81de4adb97b13184a2f682fe0edc64fcb44c061e4cb9cab407c46b36a311a9" exitCode=0 Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.964499 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-42pp6" Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.964586 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-42pp6" event={"ID":"815d4582-e47b-4d39-9c18-9886ba2a8e7d","Type":"ContainerDied","Data":"7e81de4adb97b13184a2f682fe0edc64fcb44c061e4cb9cab407c46b36a311a9"} Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.964615 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-42pp6" event={"ID":"815d4582-e47b-4d39-9c18-9886ba2a8e7d","Type":"ContainerDied","Data":"d1f10e00024ee189da9cd9a73741c160e5128d9aba1226208c36c4c1486949f2"} Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.984110 5003 scope.go:117] "RemoveContainer" containerID="cdbc11c195fd2646a7c7283174463f1daa94d7c57c9b053c49590c8ed7770c9f" Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.989125 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-42pp6"] Dec 06 15:53:59 crc kubenswrapper[5003]: I1206 15:53:59.996745 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/mariadb-operator-index-42pp6"] Dec 06 15:54:00 crc kubenswrapper[5003]: I1206 15:54:00.002577 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-6fd8f69c54-5f56z"] Dec 06 15:54:00 crc kubenswrapper[5003]: I1206 15:54:00.007106 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-6fd8f69c54-5f56z"] Dec 06 15:54:00 crc kubenswrapper[5003]: I1206 15:54:00.010832 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-controller-manager-5b58584fcf-h8rqq"] Dec 06 15:54:00 crc kubenswrapper[5003]: I1206 15:54:00.012083 5003 scope.go:117] "RemoveContainer" 
containerID="7e81de4adb97b13184a2f682fe0edc64fcb44c061e4cb9cab407c46b36a311a9" Dec 06 15:54:00 crc kubenswrapper[5003]: I1206 15:54:00.014128 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/infra-operator-controller-manager-5b58584fcf-h8rqq"] Dec 06 15:54:00 crc kubenswrapper[5003]: I1206 15:54:00.026480 5003 scope.go:117] "RemoveContainer" containerID="7e81de4adb97b13184a2f682fe0edc64fcb44c061e4cb9cab407c46b36a311a9" Dec 06 15:54:00 crc kubenswrapper[5003]: E1206 15:54:00.028704 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e81de4adb97b13184a2f682fe0edc64fcb44c061e4cb9cab407c46b36a311a9\": container with ID starting with 7e81de4adb97b13184a2f682fe0edc64fcb44c061e4cb9cab407c46b36a311a9 not found: ID does not exist" containerID="7e81de4adb97b13184a2f682fe0edc64fcb44c061e4cb9cab407c46b36a311a9" Dec 06 15:54:00 crc kubenswrapper[5003]: I1206 15:54:00.028755 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e81de4adb97b13184a2f682fe0edc64fcb44c061e4cb9cab407c46b36a311a9"} err="failed to get container status \"7e81de4adb97b13184a2f682fe0edc64fcb44c061e4cb9cab407c46b36a311a9\": rpc error: code = NotFound desc = could not find container \"7e81de4adb97b13184a2f682fe0edc64fcb44c061e4cb9cab407c46b36a311a9\": container with ID starting with 7e81de4adb97b13184a2f682fe0edc64fcb44c061e4cb9cab407c46b36a311a9 not found: ID does not exist" Dec 06 15:54:01 crc kubenswrapper[5003]: I1206 15:54:01.725770 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f4eeada-366f-4403-bd3e-54235105ef11" path="/var/lib/kubelet/pods/0f4eeada-366f-4403-bd3e-54235105ef11/volumes" Dec 06 15:54:01 crc kubenswrapper[5003]: I1206 15:54:01.727118 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73f33e10-66ff-41e9-97a3-e8cd2db5e39e" path="/var/lib/kubelet/pods/73f33e10-66ff-41e9-97a3-e8cd2db5e39e/volumes" Dec 06 15:54:01 crc kubenswrapper[5003]: I1206 15:54:01.728189 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="815d4582-e47b-4d39-9c18-9886ba2a8e7d" path="/var/lib/kubelet/pods/815d4582-e47b-4d39-9c18-9886ba2a8e7d/volumes" Dec 06 15:54:04 crc kubenswrapper[5003]: I1206 15:54:04.530343 5003 scope.go:117] "RemoveContainer" containerID="6e89417ac8b7e14169c40458d7c1371e81f5caf0850d9f26625e9facb8a4afdc" Dec 06 15:54:04 crc kubenswrapper[5003]: I1206 15:54:04.593973 5003 scope.go:117] "RemoveContainer" containerID="4aecbed679ade5396ff7932cc4a4f0e4badd8ac34230cdb84d59c462180240cf" Dec 06 15:54:04 crc kubenswrapper[5003]: I1206 15:54:04.616744 5003 scope.go:117] "RemoveContainer" containerID="c5d84a296a123c4f522a641bc63dd9e6225843feac9cb7f0db3b50294136766d" Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.683375 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-nnm2r/must-gather-dq5zc"] Dec 06 15:54:13 crc kubenswrapper[5003]: E1206 15:54:13.684195 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f4eeada-366f-4403-bd3e-54235105ef11" containerName="manager" Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684215 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f4eeada-366f-4403-bd3e-54235105ef11" containerName="manager" Dec 06 15:54:13 crc kubenswrapper[5003]: E1206 15:54:13.684233 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b87b54c4-3c22-4d05-b500-14a1cc8e99bb" containerName="operator" Dec 06 
15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684240 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b87b54c4-3c22-4d05-b500-14a1cc8e99bb" containerName="operator" Dec 06 15:54:13 crc kubenswrapper[5003]: E1206 15:54:13.684254 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9527a213-4c9a-4477-9876-1b8572119c9a" containerName="registry-server" Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684262 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="9527a213-4c9a-4477-9876-1b8572119c9a" containerName="registry-server" Dec 06 15:54:13 crc kubenswrapper[5003]: E1206 15:54:13.684270 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="815d4582-e47b-4d39-9c18-9886ba2a8e7d" containerName="registry-server" Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684277 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="815d4582-e47b-4d39-9c18-9886ba2a8e7d" containerName="registry-server" Dec 06 15:54:13 crc kubenswrapper[5003]: E1206 15:54:13.684296 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b45e98d-ae85-45e0-b565-11db9addcad3" containerName="mysql-bootstrap" Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684303 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b45e98d-ae85-45e0-b565-11db9addcad3" containerName="mysql-bootstrap" Dec 06 15:54:13 crc kubenswrapper[5003]: E1206 15:54:13.684312 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73f33e10-66ff-41e9-97a3-e8cd2db5e39e" containerName="manager" Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684320 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="73f33e10-66ff-41e9-97a3-e8cd2db5e39e" containerName="manager" Dec 06 15:54:13 crc kubenswrapper[5003]: E1206 15:54:13.684331 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aedbe2bf-a5c7-42dc-80b8-2e6fe7ee9c6c" containerName="registry-server" Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684339 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="aedbe2bf-a5c7-42dc-80b8-2e6fe7ee9c6c" containerName="registry-server" Dec 06 15:54:13 crc kubenswrapper[5003]: E1206 15:54:13.684347 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef1241df-7f65-49b6-b681-e332717e0d88" containerName="mariadb-account-delete" Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684354 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef1241df-7f65-49b6-b681-e332717e0d88" containerName="mariadb-account-delete" Dec 06 15:54:13 crc kubenswrapper[5003]: E1206 15:54:13.684362 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27124a81-a0ad-4bc6-ad89-d2f5738570bc" containerName="setup-container" Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684369 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="27124a81-a0ad-4bc6-ad89-d2f5738570bc" containerName="setup-container" Dec 06 15:54:13 crc kubenswrapper[5003]: E1206 15:54:13.684379 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72422d98-0d90-4087-8aae-e78370f932b1" containerName="mysql-bootstrap" Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684387 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="72422d98-0d90-4087-8aae-e78370f932b1" containerName="mysql-bootstrap" Dec 06 15:54:13 crc kubenswrapper[5003]: E1206 15:54:13.684395 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eada1e4a-eb19-4b1e-868d-31d913d7b85e" containerName="manager" Dec 
Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684402 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="eada1e4a-eb19-4b1e-868d-31d913d7b85e" containerName="manager"
Dec 06 15:54:13 crc kubenswrapper[5003]: E1206 15:54:13.684411 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72422d98-0d90-4087-8aae-e78370f932b1" containerName="galera"
Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684418 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="72422d98-0d90-4087-8aae-e78370f932b1" containerName="galera"
Dec 06 15:54:13 crc kubenswrapper[5003]: E1206 15:54:13.684428 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6433e1ae-768c-42ba-b961-4bd7bfba8701" containerName="galera"
Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684436 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="6433e1ae-768c-42ba-b961-4bd7bfba8701" containerName="galera"
Dec 06 15:54:13 crc kubenswrapper[5003]: E1206 15:54:13.684449 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e075c35-aaca-468e-9276-0ce9bcb6a394" containerName="keystone-api"
Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684457 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e075c35-aaca-468e-9276-0ce9bcb6a394" containerName="keystone-api"
Dec 06 15:54:13 crc kubenswrapper[5003]: E1206 15:54:13.684467 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6433e1ae-768c-42ba-b961-4bd7bfba8701" containerName="mysql-bootstrap"
Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684474 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="6433e1ae-768c-42ba-b961-4bd7bfba8701" containerName="mysql-bootstrap"
Dec 06 15:54:13 crc kubenswrapper[5003]: E1206 15:54:13.684506 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb973b37-d488-4739-9c25-96885cc3158b" containerName="manager"
Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684516 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb973b37-d488-4739-9c25-96885cc3158b" containerName="manager"
Dec 06 15:54:13 crc kubenswrapper[5003]: E1206 15:54:13.684528 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27124a81-a0ad-4bc6-ad89-d2f5738570bc" containerName="rabbitmq"
Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684537 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="27124a81-a0ad-4bc6-ad89-d2f5738570bc" containerName="rabbitmq"
Dec 06 15:54:13 crc kubenswrapper[5003]: E1206 15:54:13.684550 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b45e98d-ae85-45e0-b565-11db9addcad3" containerName="galera"
Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684557 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b45e98d-ae85-45e0-b565-11db9addcad3" containerName="galera"
Dec 06 15:54:13 crc kubenswrapper[5003]: E1206 15:54:13.684569 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff5afab4-f287-43ad-bf14-7ac8c90a52e3" containerName="registry-server"
Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684577 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff5afab4-f287-43ad-bf14-7ac8c90a52e3" containerName="registry-server"
Dec 06 15:54:13 crc kubenswrapper[5003]: E1206 15:54:13.684587 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69802b43-d4de-4ef5-9e10-9405562de3e7" containerName="memcached"
"Deleted CPUSet assignment" podUID="69802b43-d4de-4ef5-9e10-9405562de3e7" containerName="memcached" Dec 06 15:54:13 crc kubenswrapper[5003]: E1206 15:54:13.684610 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="611b7f1b-1296-4dee-a189-7e38e1e1f0b9" containerName="registry-server" Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684617 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="611b7f1b-1296-4dee-a189-7e38e1e1f0b9" containerName="registry-server" Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684743 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="815d4582-e47b-4d39-9c18-9886ba2a8e7d" containerName="registry-server" Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684760 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b45e98d-ae85-45e0-b565-11db9addcad3" containerName="galera" Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684771 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="27124a81-a0ad-4bc6-ad89-d2f5738570bc" containerName="rabbitmq" Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684780 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="72422d98-0d90-4087-8aae-e78370f932b1" containerName="galera" Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684790 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="73f33e10-66ff-41e9-97a3-e8cd2db5e39e" containerName="manager" Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684799 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="611b7f1b-1296-4dee-a189-7e38e1e1f0b9" containerName="registry-server" Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684812 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f4eeada-366f-4403-bd3e-54235105ef11" containerName="manager" Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684825 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef1241df-7f65-49b6-b681-e332717e0d88" containerName="mariadb-account-delete" Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684836 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef1241df-7f65-49b6-b681-e332717e0d88" containerName="mariadb-account-delete" Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684847 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="aedbe2bf-a5c7-42dc-80b8-2e6fe7ee9c6c" containerName="registry-server" Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684857 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff5afab4-f287-43ad-bf14-7ac8c90a52e3" containerName="registry-server" Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684867 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="69802b43-d4de-4ef5-9e10-9405562de3e7" containerName="memcached" Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684875 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="6433e1ae-768c-42ba-b961-4bd7bfba8701" containerName="galera" Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684885 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb973b37-d488-4739-9c25-96885cc3158b" containerName="manager" Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684894 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="9527a213-4c9a-4477-9876-1b8572119c9a" containerName="registry-server" Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684903 
Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684903 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="eada1e4a-eb19-4b1e-868d-31d913d7b85e" containerName="manager"
Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684909 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="b87b54c4-3c22-4d05-b500-14a1cc8e99bb" containerName="operator"
Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.684917 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e075c35-aaca-468e-9276-0ce9bcb6a394" containerName="keystone-api"
Dec 06 15:54:13 crc kubenswrapper[5003]: E1206 15:54:13.685031 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef1241df-7f65-49b6-b681-e332717e0d88" containerName="mariadb-account-delete"
Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.685040 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef1241df-7f65-49b6-b681-e332717e0d88" containerName="mariadb-account-delete"
Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.685747 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nnm2r/must-gather-dq5zc"
Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.687458 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-nnm2r"/"default-dockercfg-jq6q5"
Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.693103 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-nnm2r"/"kube-root-ca.crt"
Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.693161 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-nnm2r"/"openshift-service-ca.crt"
Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.699397 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-nnm2r/must-gather-dq5zc"]
Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.818897 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nz99t\" (UniqueName: \"kubernetes.io/projected/a0351ba8-d09e-4361-bc05-f354a1dc24c4-kube-api-access-nz99t\") pod \"must-gather-dq5zc\" (UID: \"a0351ba8-d09e-4361-bc05-f354a1dc24c4\") " pod="openshift-must-gather-nnm2r/must-gather-dq5zc"
Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.818951 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a0351ba8-d09e-4361-bc05-f354a1dc24c4-must-gather-output\") pod \"must-gather-dq5zc\" (UID: \"a0351ba8-d09e-4361-bc05-f354a1dc24c4\") " pod="openshift-must-gather-nnm2r/must-gather-dq5zc"
Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.920130 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nz99t\" (UniqueName: \"kubernetes.io/projected/a0351ba8-d09e-4361-bc05-f354a1dc24c4-kube-api-access-nz99t\") pod \"must-gather-dq5zc\" (UID: \"a0351ba8-d09e-4361-bc05-f354a1dc24c4\") " pod="openshift-must-gather-nnm2r/must-gather-dq5zc"
Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.920183 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a0351ba8-d09e-4361-bc05-f354a1dc24c4-must-gather-output\") pod \"must-gather-dq5zc\" (UID: \"a0351ba8-d09e-4361-bc05-f354a1dc24c4\") " pod="openshift-must-gather-nnm2r/must-gather-dq5zc"
Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.920705 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a0351ba8-d09e-4361-bc05-f354a1dc24c4-must-gather-output\") pod \"must-gather-dq5zc\" (UID: \"a0351ba8-d09e-4361-bc05-f354a1dc24c4\") " pod="openshift-must-gather-nnm2r/must-gather-dq5zc"
Dec 06 15:54:13 crc kubenswrapper[5003]: I1206 15:54:13.946041 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nz99t\" (UniqueName: \"kubernetes.io/projected/a0351ba8-d09e-4361-bc05-f354a1dc24c4-kube-api-access-nz99t\") pod \"must-gather-dq5zc\" (UID: \"a0351ba8-d09e-4361-bc05-f354a1dc24c4\") " pod="openshift-must-gather-nnm2r/must-gather-dq5zc"
Dec 06 15:54:14 crc kubenswrapper[5003]: I1206 15:54:14.006979 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nnm2r/must-gather-dq5zc"
Dec 06 15:54:14 crc kubenswrapper[5003]: I1206 15:54:14.203707 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-nnm2r/must-gather-dq5zc"]
Dec 06 15:54:15 crc kubenswrapper[5003]: I1206 15:54:15.065441 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nnm2r/must-gather-dq5zc" event={"ID":"a0351ba8-d09e-4361-bc05-f354a1dc24c4","Type":"ContainerStarted","Data":"3ffdc64705bb30c6663c8902c682ff792895fba178ffff03bfcb6558874af603"}
Dec 06 15:54:18 crc kubenswrapper[5003]: I1206 15:54:18.082470 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nnm2r/must-gather-dq5zc" event={"ID":"a0351ba8-d09e-4361-bc05-f354a1dc24c4","Type":"ContainerStarted","Data":"536303f7ffe7a4168799dae472a15f3694bb40732d2205597cbf0e188b4952a7"}
Dec 06 15:54:18 crc kubenswrapper[5003]: I1206 15:54:18.082865 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nnm2r/must-gather-dq5zc" event={"ID":"a0351ba8-d09e-4361-bc05-f354a1dc24c4","Type":"ContainerStarted","Data":"7891db6c0596ee72d985377e97775f4106d5000fe8290ebd22c709ffe5e877ce"}
Dec 06 15:54:18 crc kubenswrapper[5003]: I1206 15:54:18.098627 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-nnm2r/must-gather-dq5zc" podStartSLOduration=1.751972929 podStartE2EDuration="5.0986085s" podCreationTimestamp="2025-12-06 15:54:13 +0000 UTC" firstStartedPulling="2025-12-06 15:54:14.212257902 +0000 UTC m=+1332.745612283" lastFinishedPulling="2025-12-06 15:54:17.558893473 +0000 UTC m=+1336.092247854" observedRunningTime="2025-12-06 15:54:18.095446622 +0000 UTC m=+1336.628801013" watchObservedRunningTime="2025-12-06 15:54:18.0986085 +0000 UTC m=+1336.631962871"
Dec 06 15:54:18 crc kubenswrapper[5003]: I1206 15:54:18.572325 5003 patch_prober.go:28] interesting pod/machine-config-daemon-w25db container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 06 15:54:18 crc kubenswrapper[5003]: I1206 15:54:18.572397 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
pod="openshift-machine-config-operator/machine-config-daemon-w25db" Dec 06 15:54:18 crc kubenswrapper[5003]: I1206 15:54:18.573112 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fda5b3a83c2db0cd7e8cd10cafa27a87f57daf7b92848cde69fdb9048350b316"} pod="openshift-machine-config-operator/machine-config-daemon-w25db" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 06 15:54:18 crc kubenswrapper[5003]: I1206 15:54:18.573232 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" containerName="machine-config-daemon" containerID="cri-o://fda5b3a83c2db0cd7e8cd10cafa27a87f57daf7b92848cde69fdb9048350b316" gracePeriod=600 Dec 06 15:54:19 crc kubenswrapper[5003]: I1206 15:54:19.090976 5003 generic.go:334] "Generic (PLEG): container finished" podID="1a047c4d-003e-4668-9b96-945eab34ab68" containerID="fda5b3a83c2db0cd7e8cd10cafa27a87f57daf7b92848cde69fdb9048350b316" exitCode=0 Dec 06 15:54:19 crc kubenswrapper[5003]: I1206 15:54:19.091035 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" event={"ID":"1a047c4d-003e-4668-9b96-945eab34ab68","Type":"ContainerDied","Data":"fda5b3a83c2db0cd7e8cd10cafa27a87f57daf7b92848cde69fdb9048350b316"} Dec 06 15:54:19 crc kubenswrapper[5003]: I1206 15:54:19.091313 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" event={"ID":"1a047c4d-003e-4668-9b96-945eab34ab68","Type":"ContainerStarted","Data":"5cab9da0a1a459e6915d570312c7512355a1b9744939c5aec7b823b431cc45d8"} Dec 06 15:54:19 crc kubenswrapper[5003]: I1206 15:54:19.091332 5003 scope.go:117] "RemoveContainer" containerID="78b1e361c0889fb22d06542ab25b57331309a42111ebfeb58f0849e826b8ef88" Dec 06 15:54:57 crc kubenswrapper[5003]: I1206 15:54:57.885290 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-x6k88_8e450a8e-52f9-48fe-96c8-8f444a7437fe/control-plane-machine-set-operator/0.log" Dec 06 15:54:58 crc kubenswrapper[5003]: I1206 15:54:58.038576 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-dh4ts_07dcad69-d3a4-40e2-a4d2-e83eb74631d7/machine-api-operator/0.log" Dec 06 15:54:58 crc kubenswrapper[5003]: I1206 15:54:58.065903 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-dh4ts_07dcad69-d3a4-40e2-a4d2-e83eb74631d7/kube-rbac-proxy/0.log" Dec 06 15:55:04 crc kubenswrapper[5003]: I1206 15:55:04.857738 5003 scope.go:117] "RemoveContainer" containerID="ebd784c96d4ff0bdd3b40fde0d567beae42f4ad02e2fe4f00d51eb631e82d81f" Dec 06 15:55:04 crc kubenswrapper[5003]: I1206 15:55:04.880925 5003 scope.go:117] "RemoveContainer" containerID="168cb895292e1f27f56cc10297cefd2eccb5009267495e74fc09d888768d1613" Dec 06 15:55:04 crc kubenswrapper[5003]: I1206 15:55:04.916674 5003 scope.go:117] "RemoveContainer" containerID="8267da1378a67e96a4b47444d38b31927526226b6024806834287a7fc1acb5b1" Dec 06 15:55:12 crc kubenswrapper[5003]: I1206 15:55:12.265524 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-2ddqw_949aaefb-e672-4000-8a50-e943723611ff/kube-rbac-proxy/0.log" Dec 06 
Dec 06 15:55:12 crc kubenswrapper[5003]: I1206 15:55:12.276652 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-2ddqw_949aaefb-e672-4000-8a50-e943723611ff/controller/0.log"
Dec 06 15:55:12 crc kubenswrapper[5003]: I1206 15:55:12.417880 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fs94m_395612ce-6ba7-4b60-822c-dbae3eea5e7f/cp-frr-files/0.log"
Dec 06 15:55:12 crc kubenswrapper[5003]: I1206 15:55:12.577569 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fs94m_395612ce-6ba7-4b60-822c-dbae3eea5e7f/cp-reloader/0.log"
Dec 06 15:55:12 crc kubenswrapper[5003]: I1206 15:55:12.578198 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fs94m_395612ce-6ba7-4b60-822c-dbae3eea5e7f/cp-metrics/0.log"
Dec 06 15:55:12 crc kubenswrapper[5003]: I1206 15:55:12.618238 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fs94m_395612ce-6ba7-4b60-822c-dbae3eea5e7f/cp-frr-files/0.log"
Dec 06 15:55:12 crc kubenswrapper[5003]: I1206 15:55:12.627783 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fs94m_395612ce-6ba7-4b60-822c-dbae3eea5e7f/cp-reloader/0.log"
Dec 06 15:55:12 crc kubenswrapper[5003]: I1206 15:55:12.810624 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fs94m_395612ce-6ba7-4b60-822c-dbae3eea5e7f/cp-metrics/0.log"
Dec 06 15:55:12 crc kubenswrapper[5003]: I1206 15:55:12.819543 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fs94m_395612ce-6ba7-4b60-822c-dbae3eea5e7f/cp-frr-files/0.log"
Dec 06 15:55:12 crc kubenswrapper[5003]: I1206 15:55:12.846946 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fs94m_395612ce-6ba7-4b60-822c-dbae3eea5e7f/cp-reloader/0.log"
Dec 06 15:55:12 crc kubenswrapper[5003]: I1206 15:55:12.871195 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fs94m_395612ce-6ba7-4b60-822c-dbae3eea5e7f/cp-metrics/0.log"
Dec 06 15:55:12 crc kubenswrapper[5003]: I1206 15:55:12.998450 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fs94m_395612ce-6ba7-4b60-822c-dbae3eea5e7f/cp-frr-files/0.log"
Dec 06 15:55:13 crc kubenswrapper[5003]: I1206 15:55:13.013439 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fs94m_395612ce-6ba7-4b60-822c-dbae3eea5e7f/cp-reloader/0.log"
Dec 06 15:55:13 crc kubenswrapper[5003]: I1206 15:55:13.079026 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fs94m_395612ce-6ba7-4b60-822c-dbae3eea5e7f/cp-metrics/0.log"
Dec 06 15:55:13 crc kubenswrapper[5003]: I1206 15:55:13.112625 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fs94m_395612ce-6ba7-4b60-822c-dbae3eea5e7f/controller/0.log"
Dec 06 15:55:13 crc kubenswrapper[5003]: I1206 15:55:13.262461 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fs94m_395612ce-6ba7-4b60-822c-dbae3eea5e7f/kube-rbac-proxy/0.log"
Dec 06 15:55:13 crc kubenswrapper[5003]: I1206 15:55:13.275670 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fs94m_395612ce-6ba7-4b60-822c-dbae3eea5e7f/frr-metrics/0.log"
path="/var/log/pods/metallb-system_frr-k8s-fs94m_395612ce-6ba7-4b60-822c-dbae3eea5e7f/kube-rbac-proxy-frr/0.log" Dec 06 15:55:13 crc kubenswrapper[5003]: I1206 15:55:13.424931 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fs94m_395612ce-6ba7-4b60-822c-dbae3eea5e7f/frr/0.log" Dec 06 15:55:13 crc kubenswrapper[5003]: I1206 15:55:13.453219 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fs94m_395612ce-6ba7-4b60-822c-dbae3eea5e7f/reloader/0.log" Dec 06 15:55:13 crc kubenswrapper[5003]: I1206 15:55:13.547517 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-2jzmn_8d4e8c3d-e37b-4489-bd61-84af9e792de1/frr-k8s-webhook-server/0.log" Dec 06 15:55:13 crc kubenswrapper[5003]: I1206 15:55:13.638920 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-6fb7b5787c-jtd2n_e1a24ae6-f251-42bc-bb3e-1d9bd03dd13e/manager/0.log" Dec 06 15:55:13 crc kubenswrapper[5003]: I1206 15:55:13.738499 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-576499f99b-6pz7r_49994fd5-c0aa-446a-b546-d3e0acc4fa81/webhook-server/0.log" Dec 06 15:55:13 crc kubenswrapper[5003]: I1206 15:55:13.819440 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-vtb75_50405731-e195-43a8-a231-895b2b19b554/kube-rbac-proxy/0.log" Dec 06 15:55:13 crc kubenswrapper[5003]: I1206 15:55:13.996356 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-vtb75_50405731-e195-43a8-a231-895b2b19b554/speaker/0.log" Dec 06 15:55:35 crc kubenswrapper[5003]: I1206 15:55:35.600024 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn_ccb39b5c-9a0b-4ce9-a83c-a41fda667b92/util/0.log" Dec 06 15:55:35 crc kubenswrapper[5003]: I1206 15:55:35.740260 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn_ccb39b5c-9a0b-4ce9-a83c-a41fda667b92/util/0.log" Dec 06 15:55:35 crc kubenswrapper[5003]: I1206 15:55:35.777608 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn_ccb39b5c-9a0b-4ce9-a83c-a41fda667b92/pull/0.log" Dec 06 15:55:35 crc kubenswrapper[5003]: I1206 15:55:35.777631 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn_ccb39b5c-9a0b-4ce9-a83c-a41fda667b92/pull/0.log" Dec 06 15:55:35 crc kubenswrapper[5003]: I1206 15:55:35.924648 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn_ccb39b5c-9a0b-4ce9-a83c-a41fda667b92/pull/0.log" Dec 06 15:55:35 crc kubenswrapper[5003]: I1206 15:55:35.928042 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn_ccb39b5c-9a0b-4ce9-a83c-a41fda667b92/extract/0.log" Dec 06 15:55:35 crc kubenswrapper[5003]: I1206 15:55:35.975972 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn_ccb39b5c-9a0b-4ce9-a83c-a41fda667b92/util/0.log" Dec 06 15:55:36 crc 
Dec 06 15:55:36 crc kubenswrapper[5003]: I1206 15:55:36.080182 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cvx9j_1b508a70-c3a8-4f75-ae70-38613a4011cb/extract-utilities/0.log"
Dec 06 15:55:36 crc kubenswrapper[5003]: I1206 15:55:36.287551 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cvx9j_1b508a70-c3a8-4f75-ae70-38613a4011cb/extract-utilities/0.log"
Dec 06 15:55:36 crc kubenswrapper[5003]: I1206 15:55:36.297349 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cvx9j_1b508a70-c3a8-4f75-ae70-38613a4011cb/extract-content/0.log"
Dec 06 15:55:36 crc kubenswrapper[5003]: I1206 15:55:36.303058 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cvx9j_1b508a70-c3a8-4f75-ae70-38613a4011cb/extract-content/0.log"
Dec 06 15:55:36 crc kubenswrapper[5003]: I1206 15:55:36.479433 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cvx9j_1b508a70-c3a8-4f75-ae70-38613a4011cb/extract-content/0.log"
Dec 06 15:55:36 crc kubenswrapper[5003]: I1206 15:55:36.489058 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cvx9j_1b508a70-c3a8-4f75-ae70-38613a4011cb/extract-utilities/0.log"
Dec 06 15:55:36 crc kubenswrapper[5003]: I1206 15:55:36.702610 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-brzlr_462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e/extract-utilities/0.log"
Dec 06 15:55:36 crc kubenswrapper[5003]: I1206 15:55:36.767963 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cvx9j_1b508a70-c3a8-4f75-ae70-38613a4011cb/registry-server/0.log"
Dec 06 15:55:36 crc kubenswrapper[5003]: I1206 15:55:36.866008 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-brzlr_462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e/extract-utilities/0.log"
Dec 06 15:55:36 crc kubenswrapper[5003]: I1206 15:55:36.883260 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-brzlr_462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e/extract-content/0.log"
Dec 06 15:55:36 crc kubenswrapper[5003]: I1206 15:55:36.898806 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-brzlr_462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e/extract-content/0.log"
Dec 06 15:55:37 crc kubenswrapper[5003]: I1206 15:55:37.066392 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-brzlr_462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e/extract-content/0.log"
Dec 06 15:55:37 crc kubenswrapper[5003]: I1206 15:55:37.078364 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-brzlr_462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e/extract-utilities/0.log"
Dec 06 15:55:37 crc kubenswrapper[5003]: I1206 15:55:37.266147 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-5fqrn_4ae558d3-8724-4da4-bd37-89893945a2f3/marketplace-operator/0.log"
Dec 06 15:55:37 crc kubenswrapper[5003]: I1206 15:55:37.304544 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-n6k6n_c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4/extract-utilities/0.log"
Dec 06 15:55:37 crc kubenswrapper[5003]: I1206 15:55:37.471634 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-brzlr_462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e/registry-server/0.log"
Dec 06 15:55:37 crc kubenswrapper[5003]: I1206 15:55:37.511901 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-n6k6n_c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4/extract-utilities/0.log"
Dec 06 15:55:37 crc kubenswrapper[5003]: I1206 15:55:37.554296 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-n6k6n_c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4/extract-content/0.log"
Dec 06 15:55:37 crc kubenswrapper[5003]: I1206 15:55:37.575086 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-n6k6n_c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4/extract-content/0.log"
Dec 06 15:55:37 crc kubenswrapper[5003]: I1206 15:55:37.718832 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-n6k6n_c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4/extract-utilities/0.log"
Dec 06 15:55:37 crc kubenswrapper[5003]: I1206 15:55:37.729933 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-n6k6n_c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4/extract-content/0.log"
Dec 06 15:55:37 crc kubenswrapper[5003]: I1206 15:55:37.767987 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-n6k6n_c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4/registry-server/0.log"
Dec 06 15:55:37 crc kubenswrapper[5003]: I1206 15:55:37.880133 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-4f6q7_ebf945ad-37bf-4837-8fce-af8b8634c82f/extract-utilities/0.log"
Dec 06 15:55:38 crc kubenswrapper[5003]: I1206 15:55:38.025874 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-4f6q7_ebf945ad-37bf-4837-8fce-af8b8634c82f/extract-utilities/0.log"
Dec 06 15:55:38 crc kubenswrapper[5003]: I1206 15:55:38.043237 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-4f6q7_ebf945ad-37bf-4837-8fce-af8b8634c82f/extract-content/0.log"
Dec 06 15:55:38 crc kubenswrapper[5003]: I1206 15:55:38.048358 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-4f6q7_ebf945ad-37bf-4837-8fce-af8b8634c82f/extract-content/0.log"
Dec 06 15:55:38 crc kubenswrapper[5003]: I1206 15:55:38.230771 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-4f6q7_ebf945ad-37bf-4837-8fce-af8b8634c82f/extract-utilities/0.log"
Dec 06 15:55:38 crc kubenswrapper[5003]: I1206 15:55:38.251910 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-4f6q7_ebf945ad-37bf-4837-8fce-af8b8634c82f/extract-content/0.log"
Dec 06 15:55:38 crc kubenswrapper[5003]: I1206 15:55:38.494386 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-4f6q7_ebf945ad-37bf-4837-8fce-af8b8634c82f/registry-server/0.log"
Dec 06 15:56:04 crc kubenswrapper[5003]: I1206 15:56:04.976595 5003 scope.go:117] "RemoveContainer" containerID="b483224610044675bd95a22c414108875c122a9130077a4962e73c2bcf9f630b"
containerID="2000dcc5d69c8b5474589b13e6409f895e4aa8079dbf5c1e6c486a8395c2d4a7" Dec 06 15:56:05 crc kubenswrapper[5003]: I1206 15:56:05.035991 5003 scope.go:117] "RemoveContainer" containerID="e5ac28d7096c779da08aa683ede92fe0eae4d2423f15764452c422549471be10" Dec 06 15:56:18 crc kubenswrapper[5003]: I1206 15:56:18.572166 5003 patch_prober.go:28] interesting pod/machine-config-daemon-w25db container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 15:56:18 crc kubenswrapper[5003]: I1206 15:56:18.572678 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 15:56:44 crc kubenswrapper[5003]: I1206 15:56:44.974476 5003 generic.go:334] "Generic (PLEG): container finished" podID="a0351ba8-d09e-4361-bc05-f354a1dc24c4" containerID="7891db6c0596ee72d985377e97775f4106d5000fe8290ebd22c709ffe5e877ce" exitCode=0 Dec 06 15:56:44 crc kubenswrapper[5003]: I1206 15:56:44.974604 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nnm2r/must-gather-dq5zc" event={"ID":"a0351ba8-d09e-4361-bc05-f354a1dc24c4","Type":"ContainerDied","Data":"7891db6c0596ee72d985377e97775f4106d5000fe8290ebd22c709ffe5e877ce"} Dec 06 15:56:44 crc kubenswrapper[5003]: I1206 15:56:44.978952 5003 scope.go:117] "RemoveContainer" containerID="7891db6c0596ee72d985377e97775f4106d5000fe8290ebd22c709ffe5e877ce" Dec 06 15:56:45 crc kubenswrapper[5003]: I1206 15:56:45.054119 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-nnm2r_must-gather-dq5zc_a0351ba8-d09e-4361-bc05-f354a1dc24c4/gather/0.log" Dec 06 15:56:48 crc kubenswrapper[5003]: I1206 15:56:48.572839 5003 patch_prober.go:28] interesting pod/machine-config-daemon-w25db container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 15:56:48 crc kubenswrapper[5003]: I1206 15:56:48.573211 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 15:56:51 crc kubenswrapper[5003]: I1206 15:56:51.811405 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-nnm2r/must-gather-dq5zc"] Dec 06 15:56:51 crc kubenswrapper[5003]: I1206 15:56:51.812342 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-nnm2r/must-gather-dq5zc" podUID="a0351ba8-d09e-4361-bc05-f354a1dc24c4" containerName="copy" containerID="cri-o://536303f7ffe7a4168799dae472a15f3694bb40732d2205597cbf0e188b4952a7" gracePeriod=2 Dec 06 15:56:51 crc kubenswrapper[5003]: I1206 15:56:51.815567 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-nnm2r/must-gather-dq5zc"] Dec 06 15:56:52 crc kubenswrapper[5003]: I1206 15:56:52.029308 5003 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-must-gather-nnm2r_must-gather-dq5zc_a0351ba8-d09e-4361-bc05-f354a1dc24c4/copy/0.log" Dec 06 15:56:52 crc kubenswrapper[5003]: I1206 15:56:52.030043 5003 generic.go:334] "Generic (PLEG): container finished" podID="a0351ba8-d09e-4361-bc05-f354a1dc24c4" containerID="536303f7ffe7a4168799dae472a15f3694bb40732d2205597cbf0e188b4952a7" exitCode=143 Dec 06 15:56:52 crc kubenswrapper[5003]: I1206 15:56:52.147109 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-nnm2r_must-gather-dq5zc_a0351ba8-d09e-4361-bc05-f354a1dc24c4/copy/0.log" Dec 06 15:56:52 crc kubenswrapper[5003]: I1206 15:56:52.147525 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nnm2r/must-gather-dq5zc" Dec 06 15:56:52 crc kubenswrapper[5003]: I1206 15:56:52.272240 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nz99t\" (UniqueName: \"kubernetes.io/projected/a0351ba8-d09e-4361-bc05-f354a1dc24c4-kube-api-access-nz99t\") pod \"a0351ba8-d09e-4361-bc05-f354a1dc24c4\" (UID: \"a0351ba8-d09e-4361-bc05-f354a1dc24c4\") " Dec 06 15:56:52 crc kubenswrapper[5003]: I1206 15:56:52.272302 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a0351ba8-d09e-4361-bc05-f354a1dc24c4-must-gather-output\") pod \"a0351ba8-d09e-4361-bc05-f354a1dc24c4\" (UID: \"a0351ba8-d09e-4361-bc05-f354a1dc24c4\") " Dec 06 15:56:52 crc kubenswrapper[5003]: I1206 15:56:52.281222 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0351ba8-d09e-4361-bc05-f354a1dc24c4-kube-api-access-nz99t" (OuterVolumeSpecName: "kube-api-access-nz99t") pod "a0351ba8-d09e-4361-bc05-f354a1dc24c4" (UID: "a0351ba8-d09e-4361-bc05-f354a1dc24c4"). InnerVolumeSpecName "kube-api-access-nz99t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:56:52 crc kubenswrapper[5003]: I1206 15:56:52.348505 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a0351ba8-d09e-4361-bc05-f354a1dc24c4-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "a0351ba8-d09e-4361-bc05-f354a1dc24c4" (UID: "a0351ba8-d09e-4361-bc05-f354a1dc24c4"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:56:52 crc kubenswrapper[5003]: I1206 15:56:52.374400 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nz99t\" (UniqueName: \"kubernetes.io/projected/a0351ba8-d09e-4361-bc05-f354a1dc24c4-kube-api-access-nz99t\") on node \"crc\" DevicePath \"\"" Dec 06 15:56:52 crc kubenswrapper[5003]: I1206 15:56:52.374745 5003 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a0351ba8-d09e-4361-bc05-f354a1dc24c4-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 06 15:56:53 crc kubenswrapper[5003]: I1206 15:56:53.037740 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-nnm2r_must-gather-dq5zc_a0351ba8-d09e-4361-bc05-f354a1dc24c4/copy/0.log" Dec 06 15:56:53 crc kubenswrapper[5003]: I1206 15:56:53.038231 5003 scope.go:117] "RemoveContainer" containerID="536303f7ffe7a4168799dae472a15f3694bb40732d2205597cbf0e188b4952a7" Dec 06 15:56:53 crc kubenswrapper[5003]: I1206 15:56:53.038419 5003 util.go:48] "No ready sandbox for pod can be found. 
Dec 06 15:56:53 crc kubenswrapper[5003]: I1206 15:56:53.038419 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nnm2r/must-gather-dq5zc"
Dec 06 15:56:53 crc kubenswrapper[5003]: I1206 15:56:53.056156 5003 scope.go:117] "RemoveContainer" containerID="7891db6c0596ee72d985377e97775f4106d5000fe8290ebd22c709ffe5e877ce"
Dec 06 15:56:53 crc kubenswrapper[5003]: I1206 15:56:53.722328 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0351ba8-d09e-4361-bc05-f354a1dc24c4" path="/var/lib/kubelet/pods/a0351ba8-d09e-4361-bc05-f354a1dc24c4/volumes"
Dec 06 15:57:05 crc kubenswrapper[5003]: I1206 15:57:05.088549 5003 scope.go:117] "RemoveContainer" containerID="5bbecf4f38326399ef44b8146d9bda2d3582d30047f56f8aa0c02a184aac3010"
Dec 06 15:57:05 crc kubenswrapper[5003]: I1206 15:57:05.115938 5003 scope.go:117] "RemoveContainer" containerID="2be2e731a531caf7f63849f685eb6863a0f556de43b24c7db66f82e910442cff"
Dec 06 15:57:05 crc kubenswrapper[5003]: I1206 15:57:05.171251 5003 scope.go:117] "RemoveContainer" containerID="9c3cd840a8bfadb331b3c541574fded66678f4dc0928439e880d04889221b457"
Dec 06 15:57:05 crc kubenswrapper[5003]: I1206 15:57:05.190241 5003 scope.go:117] "RemoveContainer" containerID="b91689da9f4bcbca694055bf7c3acfb1316d57f399775ac2bd45d36cf215f8ab"
Dec 06 15:57:05 crc kubenswrapper[5003]: I1206 15:57:05.217327 5003 scope.go:117] "RemoveContainer" containerID="d82d5cc08343004d706177acded13a64e739dc80cb19dc8256a8b7daf1f276c2"
Dec 06 15:57:05 crc kubenswrapper[5003]: I1206 15:57:05.250984 5003 scope.go:117] "RemoveContainer" containerID="61043170fc7826b1fca90d7b07bc19e9b2e3dac557173f07c80068e8c398ab34"
Dec 06 15:57:05 crc kubenswrapper[5003]: I1206 15:57:05.273715 5003 scope.go:117] "RemoveContainer" containerID="6c4f1bf22a644bb37729dee341fb47738c94559bb939778fac9a7e4e03de14f4"
Dec 06 15:57:16 crc kubenswrapper[5003]: I1206 15:57:16.259689 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vmz29"]
Dec 06 15:57:16 crc kubenswrapper[5003]: E1206 15:57:16.260886 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0351ba8-d09e-4361-bc05-f354a1dc24c4" containerName="copy"
Dec 06 15:57:16 crc kubenswrapper[5003]: I1206 15:57:16.260917 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0351ba8-d09e-4361-bc05-f354a1dc24c4" containerName="copy"
Dec 06 15:57:16 crc kubenswrapper[5003]: E1206 15:57:16.260942 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0351ba8-d09e-4361-bc05-f354a1dc24c4" containerName="gather"
Dec 06 15:57:16 crc kubenswrapper[5003]: I1206 15:57:16.260957 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0351ba8-d09e-4361-bc05-f354a1dc24c4" containerName="gather"
Dec 06 15:57:16 crc kubenswrapper[5003]: I1206 15:57:16.261237 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0351ba8-d09e-4361-bc05-f354a1dc24c4" containerName="gather"
Dec 06 15:57:16 crc kubenswrapper[5003]: I1206 15:57:16.261267 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0351ba8-d09e-4361-bc05-f354a1dc24c4" containerName="copy"
Dec 06 15:57:16 crc kubenswrapper[5003]: I1206 15:57:16.262559 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vmz29"
Dec 06 15:57:16 crc kubenswrapper[5003]: I1206 15:57:16.313900 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vmz29"]
Dec 06 15:57:16 crc kubenswrapper[5003]: I1206 15:57:16.318903 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f119d07-a311-4290-9a4f-4282c9887c72-utilities\") pod \"redhat-operators-vmz29\" (UID: \"0f119d07-a311-4290-9a4f-4282c9887c72\") " pod="openshift-marketplace/redhat-operators-vmz29"
Dec 06 15:57:16 crc kubenswrapper[5003]: I1206 15:57:16.319058 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxssv\" (UniqueName: \"kubernetes.io/projected/0f119d07-a311-4290-9a4f-4282c9887c72-kube-api-access-qxssv\") pod \"redhat-operators-vmz29\" (UID: \"0f119d07-a311-4290-9a4f-4282c9887c72\") " pod="openshift-marketplace/redhat-operators-vmz29"
Dec 06 15:57:16 crc kubenswrapper[5003]: I1206 15:57:16.319096 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f119d07-a311-4290-9a4f-4282c9887c72-catalog-content\") pod \"redhat-operators-vmz29\" (UID: \"0f119d07-a311-4290-9a4f-4282c9887c72\") " pod="openshift-marketplace/redhat-operators-vmz29"
Dec 06 15:57:16 crc kubenswrapper[5003]: I1206 15:57:16.420461 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxssv\" (UniqueName: \"kubernetes.io/projected/0f119d07-a311-4290-9a4f-4282c9887c72-kube-api-access-qxssv\") pod \"redhat-operators-vmz29\" (UID: \"0f119d07-a311-4290-9a4f-4282c9887c72\") " pod="openshift-marketplace/redhat-operators-vmz29"
Dec 06 15:57:16 crc kubenswrapper[5003]: I1206 15:57:16.420547 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f119d07-a311-4290-9a4f-4282c9887c72-catalog-content\") pod \"redhat-operators-vmz29\" (UID: \"0f119d07-a311-4290-9a4f-4282c9887c72\") " pod="openshift-marketplace/redhat-operators-vmz29"
Dec 06 15:57:16 crc kubenswrapper[5003]: I1206 15:57:16.420595 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f119d07-a311-4290-9a4f-4282c9887c72-utilities\") pod \"redhat-operators-vmz29\" (UID: \"0f119d07-a311-4290-9a4f-4282c9887c72\") " pod="openshift-marketplace/redhat-operators-vmz29"
Dec 06 15:57:16 crc kubenswrapper[5003]: I1206 15:57:16.421051 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f119d07-a311-4290-9a4f-4282c9887c72-utilities\") pod \"redhat-operators-vmz29\" (UID: \"0f119d07-a311-4290-9a4f-4282c9887c72\") " pod="openshift-marketplace/redhat-operators-vmz29"
Dec 06 15:57:16 crc kubenswrapper[5003]: I1206 15:57:16.421196 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f119d07-a311-4290-9a4f-4282c9887c72-catalog-content\") pod \"redhat-operators-vmz29\" (UID: \"0f119d07-a311-4290-9a4f-4282c9887c72\") " pod="openshift-marketplace/redhat-operators-vmz29"
\"kube-api-access-qxssv\" (UniqueName: \"kubernetes.io/projected/0f119d07-a311-4290-9a4f-4282c9887c72-kube-api-access-qxssv\") pod \"redhat-operators-vmz29\" (UID: \"0f119d07-a311-4290-9a4f-4282c9887c72\") " pod="openshift-marketplace/redhat-operators-vmz29" Dec 06 15:57:16 crc kubenswrapper[5003]: I1206 15:57:16.625158 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vmz29" Dec 06 15:57:16 crc kubenswrapper[5003]: I1206 15:57:16.841052 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vmz29"] Dec 06 15:57:17 crc kubenswrapper[5003]: I1206 15:57:17.202133 5003 generic.go:334] "Generic (PLEG): container finished" podID="0f119d07-a311-4290-9a4f-4282c9887c72" containerID="38543ae3377e3948519ab83e415fc8d76e2ad7381d811be68e79caf577540355" exitCode=0 Dec 06 15:57:17 crc kubenswrapper[5003]: I1206 15:57:17.202232 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vmz29" event={"ID":"0f119d07-a311-4290-9a4f-4282c9887c72","Type":"ContainerDied","Data":"38543ae3377e3948519ab83e415fc8d76e2ad7381d811be68e79caf577540355"} Dec 06 15:57:17 crc kubenswrapper[5003]: I1206 15:57:17.202452 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vmz29" event={"ID":"0f119d07-a311-4290-9a4f-4282c9887c72","Type":"ContainerStarted","Data":"c123935f93a0a461563cf1d04c078fdc8f0e20d5057468a18a42ddaf9d9e65e1"} Dec 06 15:57:17 crc kubenswrapper[5003]: I1206 15:57:17.204076 5003 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 06 15:57:18 crc kubenswrapper[5003]: I1206 15:57:18.572733 5003 patch_prober.go:28] interesting pod/machine-config-daemon-w25db container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 15:57:18 crc kubenswrapper[5003]: I1206 15:57:18.573923 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 15:57:18 crc kubenswrapper[5003]: I1206 15:57:18.574061 5003 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w25db" Dec 06 15:57:18 crc kubenswrapper[5003]: I1206 15:57:18.574772 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5cab9da0a1a459e6915d570312c7512355a1b9744939c5aec7b823b431cc45d8"} pod="openshift-machine-config-operator/machine-config-daemon-w25db" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 06 15:57:18 crc kubenswrapper[5003]: I1206 15:57:18.574904 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" containerName="machine-config-daemon" containerID="cri-o://5cab9da0a1a459e6915d570312c7512355a1b9744939c5aec7b823b431cc45d8" gracePeriod=600 Dec 06 15:57:18 crc kubenswrapper[5003]: E1206 15:57:18.715347 5003 pod_workers.go:1301] "Error 
Dec 06 15:57:18 crc kubenswrapper[5003]: E1206 15:57:18.715347 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w25db_openshift-machine-config-operator(1a047c4d-003e-4668-9b96-945eab34ab68)\"" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68"
Dec 06 15:57:19 crc kubenswrapper[5003]: I1206 15:57:19.217522 5003 generic.go:334] "Generic (PLEG): container finished" podID="1a047c4d-003e-4668-9b96-945eab34ab68" containerID="5cab9da0a1a459e6915d570312c7512355a1b9744939c5aec7b823b431cc45d8" exitCode=0
Dec 06 15:57:19 crc kubenswrapper[5003]: I1206 15:57:19.217600 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" event={"ID":"1a047c4d-003e-4668-9b96-945eab34ab68","Type":"ContainerDied","Data":"5cab9da0a1a459e6915d570312c7512355a1b9744939c5aec7b823b431cc45d8"}
Dec 06 15:57:19 crc kubenswrapper[5003]: I1206 15:57:19.217673 5003 scope.go:117] "RemoveContainer" containerID="fda5b3a83c2db0cd7e8cd10cafa27a87f57daf7b92848cde69fdb9048350b316"
Dec 06 15:57:19 crc kubenswrapper[5003]: I1206 15:57:19.219395 5003 scope.go:117] "RemoveContainer" containerID="5cab9da0a1a459e6915d570312c7512355a1b9744939c5aec7b823b431cc45d8"
Dec 06 15:57:19 crc kubenswrapper[5003]: E1206 15:57:19.219737 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w25db_openshift-machine-config-operator(1a047c4d-003e-4668-9b96-945eab34ab68)\"" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68"
Dec 06 15:57:19 crc kubenswrapper[5003]: I1206 15:57:19.220634 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vmz29" event={"ID":"0f119d07-a311-4290-9a4f-4282c9887c72","Type":"ContainerStarted","Data":"3b044f6bbb9c1101d5d09ff97e0644aa433c5126b3470c293df0b6e1adbba5f2"}
Dec 06 15:57:20 crc kubenswrapper[5003]: I1206 15:57:20.229584 5003 generic.go:334] "Generic (PLEG): container finished" podID="0f119d07-a311-4290-9a4f-4282c9887c72" containerID="3b044f6bbb9c1101d5d09ff97e0644aa433c5126b3470c293df0b6e1adbba5f2" exitCode=0
Dec 06 15:57:20 crc kubenswrapper[5003]: I1206 15:57:20.229664 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vmz29" event={"ID":"0f119d07-a311-4290-9a4f-4282c9887c72","Type":"ContainerDied","Data":"3b044f6bbb9c1101d5d09ff97e0644aa433c5126b3470c293df0b6e1adbba5f2"}
Dec 06 15:57:21 crc kubenswrapper[5003]: I1206 15:57:21.243369 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vmz29" event={"ID":"0f119d07-a311-4290-9a4f-4282c9887c72","Type":"ContainerStarted","Data":"b85cc708f6e0f5c7833ba6b1f7f98877fd17e4f454d3a1e94a18263a07c01689"}
Dec 06 15:57:21 crc kubenswrapper[5003]: I1206 15:57:21.265278 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vmz29" podStartSLOduration=1.85995478 podStartE2EDuration="5.265257664s" podCreationTimestamp="2025-12-06 15:57:16 +0000 UTC" firstStartedPulling="2025-12-06 15:57:17.203816023 +0000 UTC m=+1515.737170404" lastFinishedPulling="2025-12-06 15:57:20.609118907 +0000 UTC m=+1519.142473288" observedRunningTime="2025-12-06 15:57:21.261229463 +0000 UTC m=+1519.794583854" watchObservedRunningTime="2025-12-06 15:57:21.265257664 +0000 UTC m=+1519.798612045"
Dec 06 15:57:26 crc kubenswrapper[5003]: I1206 15:57:26.625728 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vmz29"
Dec 06 15:57:26 crc kubenswrapper[5003]: I1206 15:57:26.626136 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vmz29"
Dec 06 15:57:26 crc kubenswrapper[5003]: I1206 15:57:26.661285 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vmz29"
Dec 06 15:57:27 crc kubenswrapper[5003]: I1206 15:57:27.307028 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vmz29"
Dec 06 15:57:27 crc kubenswrapper[5003]: I1206 15:57:27.347223 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vmz29"]
Dec 06 15:57:29 crc kubenswrapper[5003]: I1206 15:57:29.285847 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vmz29" podUID="0f119d07-a311-4290-9a4f-4282c9887c72" containerName="registry-server" containerID="cri-o://b85cc708f6e0f5c7833ba6b1f7f98877fd17e4f454d3a1e94a18263a07c01689" gracePeriod=2
Dec 06 15:57:29 crc kubenswrapper[5003]: I1206 15:57:29.713013 5003 scope.go:117] "RemoveContainer" containerID="5cab9da0a1a459e6915d570312c7512355a1b9744939c5aec7b823b431cc45d8"
Dec 06 15:57:29 crc kubenswrapper[5003]: E1206 15:57:29.713259 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w25db_openshift-machine-config-operator(1a047c4d-003e-4668-9b96-945eab34ab68)\"" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68"
Dec 06 15:57:32 crc kubenswrapper[5003]: I1206 15:57:32.309669 5003 generic.go:334] "Generic (PLEG): container finished" podID="0f119d07-a311-4290-9a4f-4282c9887c72" containerID="b85cc708f6e0f5c7833ba6b1f7f98877fd17e4f454d3a1e94a18263a07c01689" exitCode=0
Dec 06 15:57:32 crc kubenswrapper[5003]: I1206 15:57:32.309753 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vmz29" event={"ID":"0f119d07-a311-4290-9a4f-4282c9887c72","Type":"ContainerDied","Data":"b85cc708f6e0f5c7833ba6b1f7f98877fd17e4f454d3a1e94a18263a07c01689"}
Dec 06 15:57:32 crc kubenswrapper[5003]: I1206 15:57:32.772704 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vmz29"
Dec 06 15:57:32 crc kubenswrapper[5003]: I1206 15:57:32.900702 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f119d07-a311-4290-9a4f-4282c9887c72-utilities\") pod \"0f119d07-a311-4290-9a4f-4282c9887c72\" (UID: \"0f119d07-a311-4290-9a4f-4282c9887c72\") "
Dec 06 15:57:32 crc kubenswrapper[5003]: I1206 15:57:32.901126 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f119d07-a311-4290-9a4f-4282c9887c72-catalog-content\") pod \"0f119d07-a311-4290-9a4f-4282c9887c72\" (UID: \"0f119d07-a311-4290-9a4f-4282c9887c72\") "
Dec 06 15:57:32 crc kubenswrapper[5003]: I1206 15:57:32.901266 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qxssv\" (UniqueName: \"kubernetes.io/projected/0f119d07-a311-4290-9a4f-4282c9887c72-kube-api-access-qxssv\") pod \"0f119d07-a311-4290-9a4f-4282c9887c72\" (UID: \"0f119d07-a311-4290-9a4f-4282c9887c72\") "
Dec 06 15:57:32 crc kubenswrapper[5003]: I1206 15:57:32.901544 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0f119d07-a311-4290-9a4f-4282c9887c72-utilities" (OuterVolumeSpecName: "utilities") pod "0f119d07-a311-4290-9a4f-4282c9887c72" (UID: "0f119d07-a311-4290-9a4f-4282c9887c72"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 06 15:57:32 crc kubenswrapper[5003]: I1206 15:57:32.907072 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f119d07-a311-4290-9a4f-4282c9887c72-kube-api-access-qxssv" (OuterVolumeSpecName: "kube-api-access-qxssv") pod "0f119d07-a311-4290-9a4f-4282c9887c72" (UID: "0f119d07-a311-4290-9a4f-4282c9887c72"). InnerVolumeSpecName "kube-api-access-qxssv". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:57:33 crc kubenswrapper[5003]: I1206 15:57:33.002626 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f119d07-a311-4290-9a4f-4282c9887c72-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 15:57:33 crc kubenswrapper[5003]: I1206 15:57:33.002665 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qxssv\" (UniqueName: \"kubernetes.io/projected/0f119d07-a311-4290-9a4f-4282c9887c72-kube-api-access-qxssv\") on node \"crc\" DevicePath \"\"" Dec 06 15:57:33 crc kubenswrapper[5003]: I1206 15:57:33.002683 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f119d07-a311-4290-9a4f-4282c9887c72-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 15:57:33 crc kubenswrapper[5003]: I1206 15:57:33.319056 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vmz29" event={"ID":"0f119d07-a311-4290-9a4f-4282c9887c72","Type":"ContainerDied","Data":"c123935f93a0a461563cf1d04c078fdc8f0e20d5057468a18a42ddaf9d9e65e1"} Dec 06 15:57:33 crc kubenswrapper[5003]: I1206 15:57:33.319129 5003 scope.go:117] "RemoveContainer" containerID="b85cc708f6e0f5c7833ba6b1f7f98877fd17e4f454d3a1e94a18263a07c01689" Dec 06 15:57:33 crc kubenswrapper[5003]: I1206 15:57:33.320325 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vmz29" Dec 06 15:57:33 crc kubenswrapper[5003]: I1206 15:57:33.334678 5003 scope.go:117] "RemoveContainer" containerID="3b044f6bbb9c1101d5d09ff97e0644aa433c5126b3470c293df0b6e1adbba5f2" Dec 06 15:57:33 crc kubenswrapper[5003]: I1206 15:57:33.352602 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vmz29"] Dec 06 15:57:33 crc kubenswrapper[5003]: I1206 15:57:33.356149 5003 scope.go:117] "RemoveContainer" containerID="38543ae3377e3948519ab83e415fc8d76e2ad7381d811be68e79caf577540355" Dec 06 15:57:33 crc kubenswrapper[5003]: I1206 15:57:33.361379 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vmz29"] Dec 06 15:57:33 crc kubenswrapper[5003]: I1206 15:57:33.720525 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f119d07-a311-4290-9a4f-4282c9887c72" path="/var/lib/kubelet/pods/0f119d07-a311-4290-9a4f-4282c9887c72/volumes" Dec 06 15:57:36 crc kubenswrapper[5003]: I1206 15:57:36.381298 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-8gf5v"] Dec 06 15:57:36 crc kubenswrapper[5003]: E1206 15:57:36.382550 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f119d07-a311-4290-9a4f-4282c9887c72" containerName="extract-content" Dec 06 15:57:36 crc kubenswrapper[5003]: I1206 15:57:36.382636 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f119d07-a311-4290-9a4f-4282c9887c72" containerName="extract-content" Dec 06 15:57:36 crc kubenswrapper[5003]: E1206 15:57:36.382695 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f119d07-a311-4290-9a4f-4282c9887c72" containerName="registry-server" Dec 06 15:57:36 crc kubenswrapper[5003]: I1206 15:57:36.382746 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f119d07-a311-4290-9a4f-4282c9887c72" containerName="registry-server" Dec 06 15:57:36 crc kubenswrapper[5003]: E1206 15:57:36.382805 5003 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f119d07-a311-4290-9a4f-4282c9887c72" containerName="extract-utilities" Dec 06 15:57:36 crc kubenswrapper[5003]: I1206 15:57:36.382877 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f119d07-a311-4290-9a4f-4282c9887c72" containerName="extract-utilities" Dec 06 15:57:36 crc kubenswrapper[5003]: I1206 15:57:36.383079 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f119d07-a311-4290-9a4f-4282c9887c72" containerName="registry-server" Dec 06 15:57:36 crc kubenswrapper[5003]: I1206 15:57:36.383893 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8gf5v" Dec 06 15:57:36 crc kubenswrapper[5003]: I1206 15:57:36.391467 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8gf5v"] Dec 06 15:57:36 crc kubenswrapper[5003]: I1206 15:57:36.547717 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f48sd\" (UniqueName: \"kubernetes.io/projected/887b9550-63ae-4801-8d8f-7c4ec3073b13-kube-api-access-f48sd\") pod \"certified-operators-8gf5v\" (UID: \"887b9550-63ae-4801-8d8f-7c4ec3073b13\") " pod="openshift-marketplace/certified-operators-8gf5v" Dec 06 15:57:36 crc kubenswrapper[5003]: I1206 15:57:36.547781 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/887b9550-63ae-4801-8d8f-7c4ec3073b13-utilities\") pod \"certified-operators-8gf5v\" (UID: \"887b9550-63ae-4801-8d8f-7c4ec3073b13\") " pod="openshift-marketplace/certified-operators-8gf5v" Dec 06 15:57:36 crc kubenswrapper[5003]: I1206 15:57:36.547819 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/887b9550-63ae-4801-8d8f-7c4ec3073b13-catalog-content\") pod \"certified-operators-8gf5v\" (UID: \"887b9550-63ae-4801-8d8f-7c4ec3073b13\") " pod="openshift-marketplace/certified-operators-8gf5v" Dec 06 15:57:36 crc kubenswrapper[5003]: I1206 15:57:36.649678 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f48sd\" (UniqueName: \"kubernetes.io/projected/887b9550-63ae-4801-8d8f-7c4ec3073b13-kube-api-access-f48sd\") pod \"certified-operators-8gf5v\" (UID: \"887b9550-63ae-4801-8d8f-7c4ec3073b13\") " pod="openshift-marketplace/certified-operators-8gf5v" Dec 06 15:57:36 crc kubenswrapper[5003]: I1206 15:57:36.649741 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/887b9550-63ae-4801-8d8f-7c4ec3073b13-utilities\") pod \"certified-operators-8gf5v\" (UID: \"887b9550-63ae-4801-8d8f-7c4ec3073b13\") " pod="openshift-marketplace/certified-operators-8gf5v" Dec 06 15:57:36 crc kubenswrapper[5003]: I1206 15:57:36.649784 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/887b9550-63ae-4801-8d8f-7c4ec3073b13-catalog-content\") pod \"certified-operators-8gf5v\" (UID: \"887b9550-63ae-4801-8d8f-7c4ec3073b13\") " pod="openshift-marketplace/certified-operators-8gf5v" Dec 06 15:57:36 crc kubenswrapper[5003]: I1206 15:57:36.650424 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/887b9550-63ae-4801-8d8f-7c4ec3073b13-catalog-content\") pod \"certified-operators-8gf5v\" (UID: \"887b9550-63ae-4801-8d8f-7c4ec3073b13\") " pod="openshift-marketplace/certified-operators-8gf5v" Dec 06 15:57:36 crc kubenswrapper[5003]: I1206 15:57:36.650435 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/887b9550-63ae-4801-8d8f-7c4ec3073b13-utilities\") pod \"certified-operators-8gf5v\" (UID: \"887b9550-63ae-4801-8d8f-7c4ec3073b13\") " pod="openshift-marketplace/certified-operators-8gf5v" Dec 06 15:57:36 crc kubenswrapper[5003]: I1206 15:57:36.668594 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f48sd\" (UniqueName: \"kubernetes.io/projected/887b9550-63ae-4801-8d8f-7c4ec3073b13-kube-api-access-f48sd\") pod \"certified-operators-8gf5v\" (UID: \"887b9550-63ae-4801-8d8f-7c4ec3073b13\") " pod="openshift-marketplace/certified-operators-8gf5v" Dec 06 15:57:36 crc kubenswrapper[5003]: I1206 15:57:36.700421 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8gf5v" Dec 06 15:57:37 crc kubenswrapper[5003]: I1206 15:57:37.005092 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8gf5v"] Dec 06 15:57:37 crc kubenswrapper[5003]: I1206 15:57:37.343120 5003 generic.go:334] "Generic (PLEG): container finished" podID="887b9550-63ae-4801-8d8f-7c4ec3073b13" containerID="5b3b0082d3e2af5577054e610d5707337423c349d3974a22eddb644d41b7c23f" exitCode=0 Dec 06 15:57:37 crc kubenswrapper[5003]: I1206 15:57:37.343187 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8gf5v" event={"ID":"887b9550-63ae-4801-8d8f-7c4ec3073b13","Type":"ContainerDied","Data":"5b3b0082d3e2af5577054e610d5707337423c349d3974a22eddb644d41b7c23f"} Dec 06 15:57:37 crc kubenswrapper[5003]: I1206 15:57:37.343238 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8gf5v" event={"ID":"887b9550-63ae-4801-8d8f-7c4ec3073b13","Type":"ContainerStarted","Data":"9bd31a992e181a5422024bfae7ae3fe04d16fb3288c32f78ef0107f431cdde50"} Dec 06 15:57:39 crc kubenswrapper[5003]: I1206 15:57:39.355318 5003 generic.go:334] "Generic (PLEG): container finished" podID="887b9550-63ae-4801-8d8f-7c4ec3073b13" containerID="2dbc1be4ef2d1191f4d0fab69b039d8c93b1b09d1b68cdcaf1ea76dd18919092" exitCode=0 Dec 06 15:57:39 crc kubenswrapper[5003]: I1206 15:57:39.355428 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8gf5v" event={"ID":"887b9550-63ae-4801-8d8f-7c4ec3073b13","Type":"ContainerDied","Data":"2dbc1be4ef2d1191f4d0fab69b039d8c93b1b09d1b68cdcaf1ea76dd18919092"} Dec 06 15:57:40 crc kubenswrapper[5003]: I1206 15:57:40.364452 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8gf5v" event={"ID":"887b9550-63ae-4801-8d8f-7c4ec3073b13","Type":"ContainerStarted","Data":"a260289fa3a5d7a61d869c9e1b4607af876b9711f5cfd6e3e8193cf608dd46d5"} Dec 06 15:57:40 crc kubenswrapper[5003]: I1206 15:57:40.384557 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8gf5v" podStartSLOduration=1.999290783 podStartE2EDuration="4.384536174s" podCreationTimestamp="2025-12-06 15:57:36 +0000 UTC" firstStartedPulling="2025-12-06 15:57:37.344950895 +0000 UTC 
m=+1535.878305266" lastFinishedPulling="2025-12-06 15:57:39.730196276 +0000 UTC m=+1538.263550657" observedRunningTime="2025-12-06 15:57:40.380420661 +0000 UTC m=+1538.913775072" watchObservedRunningTime="2025-12-06 15:57:40.384536174 +0000 UTC m=+1538.917890565" Dec 06 15:57:44 crc kubenswrapper[5003]: I1206 15:57:44.712248 5003 scope.go:117] "RemoveContainer" containerID="5cab9da0a1a459e6915d570312c7512355a1b9744939c5aec7b823b431cc45d8" Dec 06 15:57:44 crc kubenswrapper[5003]: E1206 15:57:44.712798 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w25db_openshift-machine-config-operator(1a047c4d-003e-4668-9b96-945eab34ab68)\"" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" Dec 06 15:57:46 crc kubenswrapper[5003]: I1206 15:57:46.700925 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-8gf5v" Dec 06 15:57:46 crc kubenswrapper[5003]: I1206 15:57:46.701120 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-8gf5v" Dec 06 15:57:46 crc kubenswrapper[5003]: I1206 15:57:46.759864 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-8gf5v" Dec 06 15:57:47 crc kubenswrapper[5003]: I1206 15:57:47.440256 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-8gf5v" Dec 06 15:57:47 crc kubenswrapper[5003]: I1206 15:57:47.494094 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8gf5v"] Dec 06 15:57:49 crc kubenswrapper[5003]: I1206 15:57:49.413475 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-8gf5v" podUID="887b9550-63ae-4801-8d8f-7c4ec3073b13" containerName="registry-server" containerID="cri-o://a260289fa3a5d7a61d869c9e1b4607af876b9711f5cfd6e3e8193cf608dd46d5" gracePeriod=2 Dec 06 15:57:51 crc kubenswrapper[5003]: I1206 15:57:51.140288 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8gf5v" Dec 06 15:57:51 crc kubenswrapper[5003]: I1206 15:57:51.297637 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/887b9550-63ae-4801-8d8f-7c4ec3073b13-catalog-content\") pod \"887b9550-63ae-4801-8d8f-7c4ec3073b13\" (UID: \"887b9550-63ae-4801-8d8f-7c4ec3073b13\") " Dec 06 15:57:51 crc kubenswrapper[5003]: I1206 15:57:51.298102 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f48sd\" (UniqueName: \"kubernetes.io/projected/887b9550-63ae-4801-8d8f-7c4ec3073b13-kube-api-access-f48sd\") pod \"887b9550-63ae-4801-8d8f-7c4ec3073b13\" (UID: \"887b9550-63ae-4801-8d8f-7c4ec3073b13\") " Dec 06 15:57:51 crc kubenswrapper[5003]: I1206 15:57:51.298171 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/887b9550-63ae-4801-8d8f-7c4ec3073b13-utilities\") pod \"887b9550-63ae-4801-8d8f-7c4ec3073b13\" (UID: \"887b9550-63ae-4801-8d8f-7c4ec3073b13\") " Dec 06 15:57:51 crc kubenswrapper[5003]: I1206 15:57:51.299269 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/887b9550-63ae-4801-8d8f-7c4ec3073b13-utilities" (OuterVolumeSpecName: "utilities") pod "887b9550-63ae-4801-8d8f-7c4ec3073b13" (UID: "887b9550-63ae-4801-8d8f-7c4ec3073b13"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:57:51 crc kubenswrapper[5003]: I1206 15:57:51.299420 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/887b9550-63ae-4801-8d8f-7c4ec3073b13-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 15:57:51 crc kubenswrapper[5003]: I1206 15:57:51.309791 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/887b9550-63ae-4801-8d8f-7c4ec3073b13-kube-api-access-f48sd" (OuterVolumeSpecName: "kube-api-access-f48sd") pod "887b9550-63ae-4801-8d8f-7c4ec3073b13" (UID: "887b9550-63ae-4801-8d8f-7c4ec3073b13"). InnerVolumeSpecName "kube-api-access-f48sd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:57:51 crc kubenswrapper[5003]: I1206 15:57:51.362320 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/887b9550-63ae-4801-8d8f-7c4ec3073b13-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "887b9550-63ae-4801-8d8f-7c4ec3073b13" (UID: "887b9550-63ae-4801-8d8f-7c4ec3073b13"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:57:51 crc kubenswrapper[5003]: I1206 15:57:51.400692 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/887b9550-63ae-4801-8d8f-7c4ec3073b13-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 15:57:51 crc kubenswrapper[5003]: I1206 15:57:51.400751 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f48sd\" (UniqueName: \"kubernetes.io/projected/887b9550-63ae-4801-8d8f-7c4ec3073b13-kube-api-access-f48sd\") on node \"crc\" DevicePath \"\"" Dec 06 15:57:51 crc kubenswrapper[5003]: I1206 15:57:51.428256 5003 generic.go:334] "Generic (PLEG): container finished" podID="887b9550-63ae-4801-8d8f-7c4ec3073b13" containerID="a260289fa3a5d7a61d869c9e1b4607af876b9711f5cfd6e3e8193cf608dd46d5" exitCode=0 Dec 06 15:57:51 crc kubenswrapper[5003]: I1206 15:57:51.428301 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8gf5v" event={"ID":"887b9550-63ae-4801-8d8f-7c4ec3073b13","Type":"ContainerDied","Data":"a260289fa3a5d7a61d869c9e1b4607af876b9711f5cfd6e3e8193cf608dd46d5"} Dec 06 15:57:51 crc kubenswrapper[5003]: I1206 15:57:51.428344 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8gf5v" event={"ID":"887b9550-63ae-4801-8d8f-7c4ec3073b13","Type":"ContainerDied","Data":"9bd31a992e181a5422024bfae7ae3fe04d16fb3288c32f78ef0107f431cdde50"} Dec 06 15:57:51 crc kubenswrapper[5003]: I1206 15:57:51.428344 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8gf5v" Dec 06 15:57:51 crc kubenswrapper[5003]: I1206 15:57:51.428364 5003 scope.go:117] "RemoveContainer" containerID="a260289fa3a5d7a61d869c9e1b4607af876b9711f5cfd6e3e8193cf608dd46d5" Dec 06 15:57:51 crc kubenswrapper[5003]: I1206 15:57:51.446078 5003 scope.go:117] "RemoveContainer" containerID="2dbc1be4ef2d1191f4d0fab69b039d8c93b1b09d1b68cdcaf1ea76dd18919092" Dec 06 15:57:51 crc kubenswrapper[5003]: I1206 15:57:51.465632 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8gf5v"] Dec 06 15:57:51 crc kubenswrapper[5003]: I1206 15:57:51.465901 5003 scope.go:117] "RemoveContainer" containerID="5b3b0082d3e2af5577054e610d5707337423c349d3974a22eddb644d41b7c23f" Dec 06 15:57:51 crc kubenswrapper[5003]: I1206 15:57:51.469214 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-8gf5v"] Dec 06 15:57:51 crc kubenswrapper[5003]: I1206 15:57:51.482717 5003 scope.go:117] "RemoveContainer" containerID="a260289fa3a5d7a61d869c9e1b4607af876b9711f5cfd6e3e8193cf608dd46d5" Dec 06 15:57:51 crc kubenswrapper[5003]: E1206 15:57:51.483522 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a260289fa3a5d7a61d869c9e1b4607af876b9711f5cfd6e3e8193cf608dd46d5\": container with ID starting with a260289fa3a5d7a61d869c9e1b4607af876b9711f5cfd6e3e8193cf608dd46d5 not found: ID does not exist" containerID="a260289fa3a5d7a61d869c9e1b4607af876b9711f5cfd6e3e8193cf608dd46d5" Dec 06 15:57:51 crc kubenswrapper[5003]: I1206 15:57:51.483632 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a260289fa3a5d7a61d869c9e1b4607af876b9711f5cfd6e3e8193cf608dd46d5"} err="failed to get container status 
\"a260289fa3a5d7a61d869c9e1b4607af876b9711f5cfd6e3e8193cf608dd46d5\": rpc error: code = NotFound desc = could not find container \"a260289fa3a5d7a61d869c9e1b4607af876b9711f5cfd6e3e8193cf608dd46d5\": container with ID starting with a260289fa3a5d7a61d869c9e1b4607af876b9711f5cfd6e3e8193cf608dd46d5 not found: ID does not exist" Dec 06 15:57:51 crc kubenswrapper[5003]: I1206 15:57:51.483661 5003 scope.go:117] "RemoveContainer" containerID="2dbc1be4ef2d1191f4d0fab69b039d8c93b1b09d1b68cdcaf1ea76dd18919092" Dec 06 15:57:51 crc kubenswrapper[5003]: E1206 15:57:51.484128 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2dbc1be4ef2d1191f4d0fab69b039d8c93b1b09d1b68cdcaf1ea76dd18919092\": container with ID starting with 2dbc1be4ef2d1191f4d0fab69b039d8c93b1b09d1b68cdcaf1ea76dd18919092 not found: ID does not exist" containerID="2dbc1be4ef2d1191f4d0fab69b039d8c93b1b09d1b68cdcaf1ea76dd18919092" Dec 06 15:57:51 crc kubenswrapper[5003]: I1206 15:57:51.484172 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2dbc1be4ef2d1191f4d0fab69b039d8c93b1b09d1b68cdcaf1ea76dd18919092"} err="failed to get container status \"2dbc1be4ef2d1191f4d0fab69b039d8c93b1b09d1b68cdcaf1ea76dd18919092\": rpc error: code = NotFound desc = could not find container \"2dbc1be4ef2d1191f4d0fab69b039d8c93b1b09d1b68cdcaf1ea76dd18919092\": container with ID starting with 2dbc1be4ef2d1191f4d0fab69b039d8c93b1b09d1b68cdcaf1ea76dd18919092 not found: ID does not exist" Dec 06 15:57:51 crc kubenswrapper[5003]: I1206 15:57:51.484208 5003 scope.go:117] "RemoveContainer" containerID="5b3b0082d3e2af5577054e610d5707337423c349d3974a22eddb644d41b7c23f" Dec 06 15:57:51 crc kubenswrapper[5003]: E1206 15:57:51.484569 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b3b0082d3e2af5577054e610d5707337423c349d3974a22eddb644d41b7c23f\": container with ID starting with 5b3b0082d3e2af5577054e610d5707337423c349d3974a22eddb644d41b7c23f not found: ID does not exist" containerID="5b3b0082d3e2af5577054e610d5707337423c349d3974a22eddb644d41b7c23f" Dec 06 15:57:51 crc kubenswrapper[5003]: I1206 15:57:51.484595 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b3b0082d3e2af5577054e610d5707337423c349d3974a22eddb644d41b7c23f"} err="failed to get container status \"5b3b0082d3e2af5577054e610d5707337423c349d3974a22eddb644d41b7c23f\": rpc error: code = NotFound desc = could not find container \"5b3b0082d3e2af5577054e610d5707337423c349d3974a22eddb644d41b7c23f\": container with ID starting with 5b3b0082d3e2af5577054e610d5707337423c349d3974a22eddb644d41b7c23f not found: ID does not exist" Dec 06 15:57:51 crc kubenswrapper[5003]: I1206 15:57:51.729937 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="887b9550-63ae-4801-8d8f-7c4ec3073b13" path="/var/lib/kubelet/pods/887b9550-63ae-4801-8d8f-7c4ec3073b13/volumes" Dec 06 15:57:55 crc kubenswrapper[5003]: I1206 15:57:55.711905 5003 scope.go:117] "RemoveContainer" containerID="5cab9da0a1a459e6915d570312c7512355a1b9744939c5aec7b823b431cc45d8" Dec 06 15:57:55 crc kubenswrapper[5003]: E1206 15:57:55.712452 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-w25db_openshift-machine-config-operator(1a047c4d-003e-4668-9b96-945eab34ab68)\"" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" Dec 06 15:58:05 crc kubenswrapper[5003]: I1206 15:58:05.339430 5003 scope.go:117] "RemoveContainer" containerID="1cf3e769f745333d6d3a40fd17567e83131f6f0403cd83b3b8f021350130ad47" Dec 06 15:58:05 crc kubenswrapper[5003]: I1206 15:58:05.383910 5003 scope.go:117] "RemoveContainer" containerID="ddb8e4fb15308ccb985beeda4649b52cc21565b37c13180a954123225c1287d4" Dec 06 15:58:05 crc kubenswrapper[5003]: I1206 15:58:05.404099 5003 scope.go:117] "RemoveContainer" containerID="cea255bff2e324f216b4ed1f552beaf6a971495b16f63648b6799eb625bab92d" Dec 06 15:58:09 crc kubenswrapper[5003]: I1206 15:58:09.712930 5003 scope.go:117] "RemoveContainer" containerID="5cab9da0a1a459e6915d570312c7512355a1b9744939c5aec7b823b431cc45d8" Dec 06 15:58:09 crc kubenswrapper[5003]: E1206 15:58:09.713466 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w25db_openshift-machine-config-operator(1a047c4d-003e-4668-9b96-945eab34ab68)\"" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" Dec 06 15:58:22 crc kubenswrapper[5003]: I1206 15:58:22.712601 5003 scope.go:117] "RemoveContainer" containerID="5cab9da0a1a459e6915d570312c7512355a1b9744939c5aec7b823b431cc45d8" Dec 06 15:58:22 crc kubenswrapper[5003]: E1206 15:58:22.713321 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w25db_openshift-machine-config-operator(1a047c4d-003e-4668-9b96-945eab34ab68)\"" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" Dec 06 15:58:33 crc kubenswrapper[5003]: I1206 15:58:33.712550 5003 scope.go:117] "RemoveContainer" containerID="5cab9da0a1a459e6915d570312c7512355a1b9744939c5aec7b823b431cc45d8" Dec 06 15:58:33 crc kubenswrapper[5003]: E1206 15:58:33.713311 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w25db_openshift-machine-config-operator(1a047c4d-003e-4668-9b96-945eab34ab68)\"" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" Dec 06 15:58:37 crc kubenswrapper[5003]: I1206 15:58:37.736694 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-khbvq"] Dec 06 15:58:37 crc kubenswrapper[5003]: E1206 15:58:37.737508 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="887b9550-63ae-4801-8d8f-7c4ec3073b13" containerName="extract-content" Dec 06 15:58:37 crc kubenswrapper[5003]: I1206 15:58:37.737530 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="887b9550-63ae-4801-8d8f-7c4ec3073b13" containerName="extract-content" Dec 06 15:58:37 crc kubenswrapper[5003]: E1206 15:58:37.737560 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="887b9550-63ae-4801-8d8f-7c4ec3073b13" containerName="extract-utilities" Dec 06 
15:58:37 crc kubenswrapper[5003]: I1206 15:58:37.737573 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="887b9550-63ae-4801-8d8f-7c4ec3073b13" containerName="extract-utilities" Dec 06 15:58:37 crc kubenswrapper[5003]: E1206 15:58:37.737592 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="887b9550-63ae-4801-8d8f-7c4ec3073b13" containerName="registry-server" Dec 06 15:58:37 crc kubenswrapper[5003]: I1206 15:58:37.737603 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="887b9550-63ae-4801-8d8f-7c4ec3073b13" containerName="registry-server" Dec 06 15:58:37 crc kubenswrapper[5003]: I1206 15:58:37.737944 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="887b9550-63ae-4801-8d8f-7c4ec3073b13" containerName="registry-server" Dec 06 15:58:37 crc kubenswrapper[5003]: I1206 15:58:37.739166 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-khbvq" Dec 06 15:58:37 crc kubenswrapper[5003]: I1206 15:58:37.744332 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-khbvq"] Dec 06 15:58:37 crc kubenswrapper[5003]: I1206 15:58:37.782694 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f858b3f-6db7-42f8-80f0-dbef7a56b41c-catalog-content\") pod \"community-operators-khbvq\" (UID: \"5f858b3f-6db7-42f8-80f0-dbef7a56b41c\") " pod="openshift-marketplace/community-operators-khbvq" Dec 06 15:58:37 crc kubenswrapper[5003]: I1206 15:58:37.782929 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f858b3f-6db7-42f8-80f0-dbef7a56b41c-utilities\") pod \"community-operators-khbvq\" (UID: \"5f858b3f-6db7-42f8-80f0-dbef7a56b41c\") " pod="openshift-marketplace/community-operators-khbvq" Dec 06 15:58:37 crc kubenswrapper[5003]: I1206 15:58:37.783098 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2qk5\" (UniqueName: \"kubernetes.io/projected/5f858b3f-6db7-42f8-80f0-dbef7a56b41c-kube-api-access-j2qk5\") pod \"community-operators-khbvq\" (UID: \"5f858b3f-6db7-42f8-80f0-dbef7a56b41c\") " pod="openshift-marketplace/community-operators-khbvq" Dec 06 15:58:37 crc kubenswrapper[5003]: I1206 15:58:37.883695 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f858b3f-6db7-42f8-80f0-dbef7a56b41c-catalog-content\") pod \"community-operators-khbvq\" (UID: \"5f858b3f-6db7-42f8-80f0-dbef7a56b41c\") " pod="openshift-marketplace/community-operators-khbvq" Dec 06 15:58:37 crc kubenswrapper[5003]: I1206 15:58:37.883756 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f858b3f-6db7-42f8-80f0-dbef7a56b41c-utilities\") pod \"community-operators-khbvq\" (UID: \"5f858b3f-6db7-42f8-80f0-dbef7a56b41c\") " pod="openshift-marketplace/community-operators-khbvq" Dec 06 15:58:37 crc kubenswrapper[5003]: I1206 15:58:37.883788 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2qk5\" (UniqueName: \"kubernetes.io/projected/5f858b3f-6db7-42f8-80f0-dbef7a56b41c-kube-api-access-j2qk5\") pod \"community-operators-khbvq\" (UID: \"5f858b3f-6db7-42f8-80f0-dbef7a56b41c\") " 
pod="openshift-marketplace/community-operators-khbvq" Dec 06 15:58:37 crc kubenswrapper[5003]: I1206 15:58:37.884198 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f858b3f-6db7-42f8-80f0-dbef7a56b41c-utilities\") pod \"community-operators-khbvq\" (UID: \"5f858b3f-6db7-42f8-80f0-dbef7a56b41c\") " pod="openshift-marketplace/community-operators-khbvq" Dec 06 15:58:37 crc kubenswrapper[5003]: I1206 15:58:37.884289 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f858b3f-6db7-42f8-80f0-dbef7a56b41c-catalog-content\") pod \"community-operators-khbvq\" (UID: \"5f858b3f-6db7-42f8-80f0-dbef7a56b41c\") " pod="openshift-marketplace/community-operators-khbvq" Dec 06 15:58:37 crc kubenswrapper[5003]: I1206 15:58:37.906933 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2qk5\" (UniqueName: \"kubernetes.io/projected/5f858b3f-6db7-42f8-80f0-dbef7a56b41c-kube-api-access-j2qk5\") pod \"community-operators-khbvq\" (UID: \"5f858b3f-6db7-42f8-80f0-dbef7a56b41c\") " pod="openshift-marketplace/community-operators-khbvq" Dec 06 15:58:38 crc kubenswrapper[5003]: I1206 15:58:38.055413 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-khbvq" Dec 06 15:58:38 crc kubenswrapper[5003]: I1206 15:58:38.489769 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-khbvq"] Dec 06 15:58:38 crc kubenswrapper[5003]: I1206 15:58:38.730600 5003 generic.go:334] "Generic (PLEG): container finished" podID="5f858b3f-6db7-42f8-80f0-dbef7a56b41c" containerID="99a6e2cb70d8e89a326adb8ceaa5848959fb1d202ddcacb5cfae7526c3aec09f" exitCode=0 Dec 06 15:58:38 crc kubenswrapper[5003]: I1206 15:58:38.730662 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-khbvq" event={"ID":"5f858b3f-6db7-42f8-80f0-dbef7a56b41c","Type":"ContainerDied","Data":"99a6e2cb70d8e89a326adb8ceaa5848959fb1d202ddcacb5cfae7526c3aec09f"} Dec 06 15:58:38 crc kubenswrapper[5003]: I1206 15:58:38.730878 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-khbvq" event={"ID":"5f858b3f-6db7-42f8-80f0-dbef7a56b41c","Type":"ContainerStarted","Data":"7e270b01c89ed225678dded1b8b2ba6d7878710e0f4912fbffb422808c35d8aa"} Dec 06 15:58:39 crc kubenswrapper[5003]: I1206 15:58:39.742019 5003 generic.go:334] "Generic (PLEG): container finished" podID="5f858b3f-6db7-42f8-80f0-dbef7a56b41c" containerID="f27b5c4af6e641989806f8142bd350b788dd46e45dc1f301a28c8cc494e8acbd" exitCode=0 Dec 06 15:58:39 crc kubenswrapper[5003]: I1206 15:58:39.742100 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-khbvq" event={"ID":"5f858b3f-6db7-42f8-80f0-dbef7a56b41c","Type":"ContainerDied","Data":"f27b5c4af6e641989806f8142bd350b788dd46e45dc1f301a28c8cc494e8acbd"} Dec 06 15:58:40 crc kubenswrapper[5003]: I1206 15:58:40.752250 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-khbvq" event={"ID":"5f858b3f-6db7-42f8-80f0-dbef7a56b41c","Type":"ContainerStarted","Data":"c5331055c33c8c0c71502f7d9337aa23a29db408a4a9e9f57895aec66fb57bff"} Dec 06 15:58:40 crc kubenswrapper[5003]: I1206 15:58:40.788544 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/community-operators-khbvq" podStartSLOduration=2.370835854 podStartE2EDuration="3.788479382s" podCreationTimestamp="2025-12-06 15:58:37 +0000 UTC" firstStartedPulling="2025-12-06 15:58:38.732937364 +0000 UTC m=+1597.266291745" lastFinishedPulling="2025-12-06 15:58:40.150580852 +0000 UTC m=+1598.683935273" observedRunningTime="2025-12-06 15:58:40.785164691 +0000 UTC m=+1599.318519152" watchObservedRunningTime="2025-12-06 15:58:40.788479382 +0000 UTC m=+1599.321833803" Dec 06 15:58:44 crc kubenswrapper[5003]: I1206 15:58:44.712799 5003 scope.go:117] "RemoveContainer" containerID="5cab9da0a1a459e6915d570312c7512355a1b9744939c5aec7b823b431cc45d8" Dec 06 15:58:44 crc kubenswrapper[5003]: E1206 15:58:44.713527 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w25db_openshift-machine-config-operator(1a047c4d-003e-4668-9b96-945eab34ab68)\"" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" Dec 06 15:58:48 crc kubenswrapper[5003]: I1206 15:58:48.056048 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-khbvq" Dec 06 15:58:48 crc kubenswrapper[5003]: I1206 15:58:48.056394 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-khbvq" Dec 06 15:58:48 crc kubenswrapper[5003]: I1206 15:58:48.219696 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-khbvq" Dec 06 15:58:48 crc kubenswrapper[5003]: I1206 15:58:48.840274 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-khbvq" Dec 06 15:58:48 crc kubenswrapper[5003]: I1206 15:58:48.880763 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-khbvq"] Dec 06 15:58:50 crc kubenswrapper[5003]: I1206 15:58:50.815267 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-khbvq" podUID="5f858b3f-6db7-42f8-80f0-dbef7a56b41c" containerName="registry-server" containerID="cri-o://c5331055c33c8c0c71502f7d9337aa23a29db408a4a9e9f57895aec66fb57bff" gracePeriod=2 Dec 06 15:58:51 crc kubenswrapper[5003]: I1206 15:58:51.715636 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-khbvq" Dec 06 15:58:51 crc kubenswrapper[5003]: I1206 15:58:51.844813 5003 generic.go:334] "Generic (PLEG): container finished" podID="5f858b3f-6db7-42f8-80f0-dbef7a56b41c" containerID="c5331055c33c8c0c71502f7d9337aa23a29db408a4a9e9f57895aec66fb57bff" exitCode=0 Dec 06 15:58:51 crc kubenswrapper[5003]: I1206 15:58:51.844900 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-khbvq" event={"ID":"5f858b3f-6db7-42f8-80f0-dbef7a56b41c","Type":"ContainerDied","Data":"c5331055c33c8c0c71502f7d9337aa23a29db408a4a9e9f57895aec66fb57bff"} Dec 06 15:58:51 crc kubenswrapper[5003]: I1206 15:58:51.844943 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-khbvq" event={"ID":"5f858b3f-6db7-42f8-80f0-dbef7a56b41c","Type":"ContainerDied","Data":"7e270b01c89ed225678dded1b8b2ba6d7878710e0f4912fbffb422808c35d8aa"} Dec 06 15:58:51 crc kubenswrapper[5003]: I1206 15:58:51.844970 5003 scope.go:117] "RemoveContainer" containerID="c5331055c33c8c0c71502f7d9337aa23a29db408a4a9e9f57895aec66fb57bff" Dec 06 15:58:51 crc kubenswrapper[5003]: I1206 15:58:51.844971 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-khbvq" Dec 06 15:58:51 crc kubenswrapper[5003]: I1206 15:58:51.866349 5003 scope.go:117] "RemoveContainer" containerID="f27b5c4af6e641989806f8142bd350b788dd46e45dc1f301a28c8cc494e8acbd" Dec 06 15:58:51 crc kubenswrapper[5003]: I1206 15:58:51.880476 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j2qk5\" (UniqueName: \"kubernetes.io/projected/5f858b3f-6db7-42f8-80f0-dbef7a56b41c-kube-api-access-j2qk5\") pod \"5f858b3f-6db7-42f8-80f0-dbef7a56b41c\" (UID: \"5f858b3f-6db7-42f8-80f0-dbef7a56b41c\") " Dec 06 15:58:51 crc kubenswrapper[5003]: I1206 15:58:51.880598 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f858b3f-6db7-42f8-80f0-dbef7a56b41c-catalog-content\") pod \"5f858b3f-6db7-42f8-80f0-dbef7a56b41c\" (UID: \"5f858b3f-6db7-42f8-80f0-dbef7a56b41c\") " Dec 06 15:58:51 crc kubenswrapper[5003]: I1206 15:58:51.880706 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f858b3f-6db7-42f8-80f0-dbef7a56b41c-utilities\") pod \"5f858b3f-6db7-42f8-80f0-dbef7a56b41c\" (UID: \"5f858b3f-6db7-42f8-80f0-dbef7a56b41c\") " Dec 06 15:58:51 crc kubenswrapper[5003]: I1206 15:58:51.882741 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f858b3f-6db7-42f8-80f0-dbef7a56b41c-utilities" (OuterVolumeSpecName: "utilities") pod "5f858b3f-6db7-42f8-80f0-dbef7a56b41c" (UID: "5f858b3f-6db7-42f8-80f0-dbef7a56b41c"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:58:51 crc kubenswrapper[5003]: I1206 15:58:51.885508 5003 scope.go:117] "RemoveContainer" containerID="99a6e2cb70d8e89a326adb8ceaa5848959fb1d202ddcacb5cfae7526c3aec09f" Dec 06 15:58:51 crc kubenswrapper[5003]: I1206 15:58:51.896605 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f858b3f-6db7-42f8-80f0-dbef7a56b41c-kube-api-access-j2qk5" (OuterVolumeSpecName: "kube-api-access-j2qk5") pod "5f858b3f-6db7-42f8-80f0-dbef7a56b41c" (UID: "5f858b3f-6db7-42f8-80f0-dbef7a56b41c"). InnerVolumeSpecName "kube-api-access-j2qk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 15:58:51 crc kubenswrapper[5003]: I1206 15:58:51.927858 5003 scope.go:117] "RemoveContainer" containerID="c5331055c33c8c0c71502f7d9337aa23a29db408a4a9e9f57895aec66fb57bff" Dec 06 15:58:51 crc kubenswrapper[5003]: E1206 15:58:51.928438 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5331055c33c8c0c71502f7d9337aa23a29db408a4a9e9f57895aec66fb57bff\": container with ID starting with c5331055c33c8c0c71502f7d9337aa23a29db408a4a9e9f57895aec66fb57bff not found: ID does not exist" containerID="c5331055c33c8c0c71502f7d9337aa23a29db408a4a9e9f57895aec66fb57bff" Dec 06 15:58:51 crc kubenswrapper[5003]: I1206 15:58:51.928519 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5331055c33c8c0c71502f7d9337aa23a29db408a4a9e9f57895aec66fb57bff"} err="failed to get container status \"c5331055c33c8c0c71502f7d9337aa23a29db408a4a9e9f57895aec66fb57bff\": rpc error: code = NotFound desc = could not find container \"c5331055c33c8c0c71502f7d9337aa23a29db408a4a9e9f57895aec66fb57bff\": container with ID starting with c5331055c33c8c0c71502f7d9337aa23a29db408a4a9e9f57895aec66fb57bff not found: ID does not exist" Dec 06 15:58:51 crc kubenswrapper[5003]: I1206 15:58:51.928555 5003 scope.go:117] "RemoveContainer" containerID="f27b5c4af6e641989806f8142bd350b788dd46e45dc1f301a28c8cc494e8acbd" Dec 06 15:58:51 crc kubenswrapper[5003]: E1206 15:58:51.928875 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f27b5c4af6e641989806f8142bd350b788dd46e45dc1f301a28c8cc494e8acbd\": container with ID starting with f27b5c4af6e641989806f8142bd350b788dd46e45dc1f301a28c8cc494e8acbd not found: ID does not exist" containerID="f27b5c4af6e641989806f8142bd350b788dd46e45dc1f301a28c8cc494e8acbd" Dec 06 15:58:51 crc kubenswrapper[5003]: I1206 15:58:51.928915 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f27b5c4af6e641989806f8142bd350b788dd46e45dc1f301a28c8cc494e8acbd"} err="failed to get container status \"f27b5c4af6e641989806f8142bd350b788dd46e45dc1f301a28c8cc494e8acbd\": rpc error: code = NotFound desc = could not find container \"f27b5c4af6e641989806f8142bd350b788dd46e45dc1f301a28c8cc494e8acbd\": container with ID starting with f27b5c4af6e641989806f8142bd350b788dd46e45dc1f301a28c8cc494e8acbd not found: ID does not exist" Dec 06 15:58:51 crc kubenswrapper[5003]: I1206 15:58:51.928940 5003 scope.go:117] "RemoveContainer" containerID="99a6e2cb70d8e89a326adb8ceaa5848959fb1d202ddcacb5cfae7526c3aec09f" Dec 06 15:58:51 crc kubenswrapper[5003]: E1206 15:58:51.929405 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"99a6e2cb70d8e89a326adb8ceaa5848959fb1d202ddcacb5cfae7526c3aec09f\": container with ID starting with 99a6e2cb70d8e89a326adb8ceaa5848959fb1d202ddcacb5cfae7526c3aec09f not found: ID does not exist" containerID="99a6e2cb70d8e89a326adb8ceaa5848959fb1d202ddcacb5cfae7526c3aec09f" Dec 06 15:58:51 crc kubenswrapper[5003]: I1206 15:58:51.929473 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99a6e2cb70d8e89a326adb8ceaa5848959fb1d202ddcacb5cfae7526c3aec09f"} err="failed to get container status \"99a6e2cb70d8e89a326adb8ceaa5848959fb1d202ddcacb5cfae7526c3aec09f\": rpc error: code = NotFound desc = could not find container \"99a6e2cb70d8e89a326adb8ceaa5848959fb1d202ddcacb5cfae7526c3aec09f\": container with ID starting with 99a6e2cb70d8e89a326adb8ceaa5848959fb1d202ddcacb5cfae7526c3aec09f not found: ID does not exist" Dec 06 15:58:51 crc kubenswrapper[5003]: I1206 15:58:51.962777 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f858b3f-6db7-42f8-80f0-dbef7a56b41c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5f858b3f-6db7-42f8-80f0-dbef7a56b41c" (UID: "5f858b3f-6db7-42f8-80f0-dbef7a56b41c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 15:58:51 crc kubenswrapper[5003]: I1206 15:58:51.983474 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f858b3f-6db7-42f8-80f0-dbef7a56b41c-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 15:58:51 crc kubenswrapper[5003]: I1206 15:58:51.983571 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j2qk5\" (UniqueName: \"kubernetes.io/projected/5f858b3f-6db7-42f8-80f0-dbef7a56b41c-kube-api-access-j2qk5\") on node \"crc\" DevicePath \"\"" Dec 06 15:58:51 crc kubenswrapper[5003]: I1206 15:58:51.983594 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f858b3f-6db7-42f8-80f0-dbef7a56b41c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 15:58:52 crc kubenswrapper[5003]: I1206 15:58:52.195782 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-khbvq"] Dec 06 15:58:52 crc kubenswrapper[5003]: I1206 15:58:52.200084 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-khbvq"] Dec 06 15:58:53 crc kubenswrapper[5003]: I1206 15:58:53.723580 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f858b3f-6db7-42f8-80f0-dbef7a56b41c" path="/var/lib/kubelet/pods/5f858b3f-6db7-42f8-80f0-dbef7a56b41c/volumes" Dec 06 15:58:58 crc kubenswrapper[5003]: I1206 15:58:58.712658 5003 scope.go:117] "RemoveContainer" containerID="5cab9da0a1a459e6915d570312c7512355a1b9744939c5aec7b823b431cc45d8" Dec 06 15:58:58 crc kubenswrapper[5003]: E1206 15:58:58.713015 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w25db_openshift-machine-config-operator(1a047c4d-003e-4668-9b96-945eab34ab68)\"" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" Dec 06 15:59:11 crc kubenswrapper[5003]: I1206 15:59:11.718231 5003 scope.go:117] "RemoveContainer" 
containerID="5cab9da0a1a459e6915d570312c7512355a1b9744939c5aec7b823b431cc45d8" Dec 06 15:59:11 crc kubenswrapper[5003]: E1206 15:59:11.719292 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w25db_openshift-machine-config-operator(1a047c4d-003e-4668-9b96-945eab34ab68)\"" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" Dec 06 15:59:22 crc kubenswrapper[5003]: I1206 15:59:22.712798 5003 scope.go:117] "RemoveContainer" containerID="5cab9da0a1a459e6915d570312c7512355a1b9744939c5aec7b823b431cc45d8" Dec 06 15:59:22 crc kubenswrapper[5003]: E1206 15:59:22.713501 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w25db_openshift-machine-config-operator(1a047c4d-003e-4668-9b96-945eab34ab68)\"" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" Dec 06 15:59:33 crc kubenswrapper[5003]: I1206 15:59:33.713171 5003 scope.go:117] "RemoveContainer" containerID="5cab9da0a1a459e6915d570312c7512355a1b9744939c5aec7b823b431cc45d8" Dec 06 15:59:33 crc kubenswrapper[5003]: E1206 15:59:33.714336 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w25db_openshift-machine-config-operator(1a047c4d-003e-4668-9b96-945eab34ab68)\"" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" Dec 06 15:59:34 crc kubenswrapper[5003]: I1206 15:59:34.599427 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-lfx5x/must-gather-s2984"] Dec 06 15:59:34 crc kubenswrapper[5003]: E1206 15:59:34.599771 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f858b3f-6db7-42f8-80f0-dbef7a56b41c" containerName="extract-utilities" Dec 06 15:59:34 crc kubenswrapper[5003]: I1206 15:59:34.599799 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f858b3f-6db7-42f8-80f0-dbef7a56b41c" containerName="extract-utilities" Dec 06 15:59:34 crc kubenswrapper[5003]: E1206 15:59:34.599832 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f858b3f-6db7-42f8-80f0-dbef7a56b41c" containerName="registry-server" Dec 06 15:59:34 crc kubenswrapper[5003]: I1206 15:59:34.599846 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f858b3f-6db7-42f8-80f0-dbef7a56b41c" containerName="registry-server" Dec 06 15:59:34 crc kubenswrapper[5003]: E1206 15:59:34.599882 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f858b3f-6db7-42f8-80f0-dbef7a56b41c" containerName="extract-content" Dec 06 15:59:34 crc kubenswrapper[5003]: I1206 15:59:34.599896 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f858b3f-6db7-42f8-80f0-dbef7a56b41c" containerName="extract-content" Dec 06 15:59:34 crc kubenswrapper[5003]: I1206 15:59:34.600081 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f858b3f-6db7-42f8-80f0-dbef7a56b41c" containerName="registry-server" Dec 06 15:59:34 crc kubenswrapper[5003]: I1206 15:59:34.601065 5003 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-lfx5x/must-gather-s2984" Dec 06 15:59:34 crc kubenswrapper[5003]: I1206 15:59:34.606294 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-lfx5x"/"kube-root-ca.crt" Dec 06 15:59:34 crc kubenswrapper[5003]: I1206 15:59:34.606455 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-lfx5x"/"openshift-service-ca.crt" Dec 06 15:59:34 crc kubenswrapper[5003]: I1206 15:59:34.677607 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dktb4\" (UniqueName: \"kubernetes.io/projected/148cb75b-a947-408a-bfa1-0dd81161f43b-kube-api-access-dktb4\") pod \"must-gather-s2984\" (UID: \"148cb75b-a947-408a-bfa1-0dd81161f43b\") " pod="openshift-must-gather-lfx5x/must-gather-s2984" Dec 06 15:59:34 crc kubenswrapper[5003]: I1206 15:59:34.677727 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/148cb75b-a947-408a-bfa1-0dd81161f43b-must-gather-output\") pod \"must-gather-s2984\" (UID: \"148cb75b-a947-408a-bfa1-0dd81161f43b\") " pod="openshift-must-gather-lfx5x/must-gather-s2984" Dec 06 15:59:34 crc kubenswrapper[5003]: I1206 15:59:34.678069 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-lfx5x/must-gather-s2984"] Dec 06 15:59:34 crc kubenswrapper[5003]: I1206 15:59:34.779197 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/148cb75b-a947-408a-bfa1-0dd81161f43b-must-gather-output\") pod \"must-gather-s2984\" (UID: \"148cb75b-a947-408a-bfa1-0dd81161f43b\") " pod="openshift-must-gather-lfx5x/must-gather-s2984" Dec 06 15:59:34 crc kubenswrapper[5003]: I1206 15:59:34.779302 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dktb4\" (UniqueName: \"kubernetes.io/projected/148cb75b-a947-408a-bfa1-0dd81161f43b-kube-api-access-dktb4\") pod \"must-gather-s2984\" (UID: \"148cb75b-a947-408a-bfa1-0dd81161f43b\") " pod="openshift-must-gather-lfx5x/must-gather-s2984" Dec 06 15:59:34 crc kubenswrapper[5003]: I1206 15:59:34.779712 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/148cb75b-a947-408a-bfa1-0dd81161f43b-must-gather-output\") pod \"must-gather-s2984\" (UID: \"148cb75b-a947-408a-bfa1-0dd81161f43b\") " pod="openshift-must-gather-lfx5x/must-gather-s2984" Dec 06 15:59:34 crc kubenswrapper[5003]: I1206 15:59:34.799464 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dktb4\" (UniqueName: \"kubernetes.io/projected/148cb75b-a947-408a-bfa1-0dd81161f43b-kube-api-access-dktb4\") pod \"must-gather-s2984\" (UID: \"148cb75b-a947-408a-bfa1-0dd81161f43b\") " pod="openshift-must-gather-lfx5x/must-gather-s2984" Dec 06 15:59:34 crc kubenswrapper[5003]: I1206 15:59:34.925920 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-lfx5x/must-gather-s2984" Dec 06 15:59:35 crc kubenswrapper[5003]: I1206 15:59:35.359914 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-lfx5x/must-gather-s2984"] Dec 06 15:59:36 crc kubenswrapper[5003]: I1206 15:59:36.118696 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-lfx5x/must-gather-s2984" event={"ID":"148cb75b-a947-408a-bfa1-0dd81161f43b","Type":"ContainerStarted","Data":"1ae5685ee45d6f9bd40604b408b35342740b2f743a3b82a789534cab2c4b0946"} Dec 06 15:59:36 crc kubenswrapper[5003]: I1206 15:59:36.118738 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-lfx5x/must-gather-s2984" event={"ID":"148cb75b-a947-408a-bfa1-0dd81161f43b","Type":"ContainerStarted","Data":"0f46d7d349341a206352e3716b0f3eeae110c16f75d20157b96ed6d898e8a5ab"} Dec 06 15:59:36 crc kubenswrapper[5003]: I1206 15:59:36.118749 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-lfx5x/must-gather-s2984" event={"ID":"148cb75b-a947-408a-bfa1-0dd81161f43b","Type":"ContainerStarted","Data":"373c66b11ef8b9e92c73b4c09b5883c19d95116fa91e8e639215885557866d7a"} Dec 06 15:59:36 crc kubenswrapper[5003]: I1206 15:59:36.135293 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-lfx5x/must-gather-s2984" podStartSLOduration=2.135277476 podStartE2EDuration="2.135277476s" podCreationTimestamp="2025-12-06 15:59:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 15:59:36.132909951 +0000 UTC m=+1654.666264332" watchObservedRunningTime="2025-12-06 15:59:36.135277476 +0000 UTC m=+1654.668631857" Dec 06 15:59:47 crc kubenswrapper[5003]: I1206 15:59:47.713465 5003 scope.go:117] "RemoveContainer" containerID="5cab9da0a1a459e6915d570312c7512355a1b9744939c5aec7b823b431cc45d8" Dec 06 15:59:47 crc kubenswrapper[5003]: E1206 15:59:47.714277 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w25db_openshift-machine-config-operator(1a047c4d-003e-4668-9b96-945eab34ab68)\"" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" Dec 06 15:59:59 crc kubenswrapper[5003]: I1206 15:59:59.712822 5003 scope.go:117] "RemoveContainer" containerID="5cab9da0a1a459e6915d570312c7512355a1b9744939c5aec7b823b431cc45d8" Dec 06 15:59:59 crc kubenswrapper[5003]: E1206 15:59:59.713559 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w25db_openshift-machine-config-operator(1a047c4d-003e-4668-9b96-945eab34ab68)\"" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" Dec 06 16:00:00 crc kubenswrapper[5003]: I1206 16:00:00.144916 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29417280-nnjd6"] Dec 06 16:00:00 crc kubenswrapper[5003]: I1206 16:00:00.147371 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29417280-nnjd6" Dec 06 16:00:00 crc kubenswrapper[5003]: I1206 16:00:00.151841 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 06 16:00:00 crc kubenswrapper[5003]: I1206 16:00:00.152079 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 06 16:00:00 crc kubenswrapper[5003]: I1206 16:00:00.164921 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29417280-nnjd6"] Dec 06 16:00:00 crc kubenswrapper[5003]: I1206 16:00:00.323062 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4d39c7ff-1dd2-4b57-83f8-5d049fa905d4-config-volume\") pod \"collect-profiles-29417280-nnjd6\" (UID: \"4d39c7ff-1dd2-4b57-83f8-5d049fa905d4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29417280-nnjd6" Dec 06 16:00:00 crc kubenswrapper[5003]: I1206 16:00:00.323204 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtzx2\" (UniqueName: \"kubernetes.io/projected/4d39c7ff-1dd2-4b57-83f8-5d049fa905d4-kube-api-access-mtzx2\") pod \"collect-profiles-29417280-nnjd6\" (UID: \"4d39c7ff-1dd2-4b57-83f8-5d049fa905d4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29417280-nnjd6" Dec 06 16:00:00 crc kubenswrapper[5003]: I1206 16:00:00.323291 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4d39c7ff-1dd2-4b57-83f8-5d049fa905d4-secret-volume\") pod \"collect-profiles-29417280-nnjd6\" (UID: \"4d39c7ff-1dd2-4b57-83f8-5d049fa905d4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29417280-nnjd6" Dec 06 16:00:00 crc kubenswrapper[5003]: I1206 16:00:00.424280 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4d39c7ff-1dd2-4b57-83f8-5d049fa905d4-secret-volume\") pod \"collect-profiles-29417280-nnjd6\" (UID: \"4d39c7ff-1dd2-4b57-83f8-5d049fa905d4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29417280-nnjd6" Dec 06 16:00:00 crc kubenswrapper[5003]: I1206 16:00:00.424354 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4d39c7ff-1dd2-4b57-83f8-5d049fa905d4-config-volume\") pod \"collect-profiles-29417280-nnjd6\" (UID: \"4d39c7ff-1dd2-4b57-83f8-5d049fa905d4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29417280-nnjd6" Dec 06 16:00:00 crc kubenswrapper[5003]: I1206 16:00:00.424434 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtzx2\" (UniqueName: \"kubernetes.io/projected/4d39c7ff-1dd2-4b57-83f8-5d049fa905d4-kube-api-access-mtzx2\") pod \"collect-profiles-29417280-nnjd6\" (UID: \"4d39c7ff-1dd2-4b57-83f8-5d049fa905d4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29417280-nnjd6" Dec 06 16:00:00 crc kubenswrapper[5003]: I1206 16:00:00.425271 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4d39c7ff-1dd2-4b57-83f8-5d049fa905d4-config-volume\") pod 
\"collect-profiles-29417280-nnjd6\" (UID: \"4d39c7ff-1dd2-4b57-83f8-5d049fa905d4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29417280-nnjd6" Dec 06 16:00:00 crc kubenswrapper[5003]: I1206 16:00:00.435876 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4d39c7ff-1dd2-4b57-83f8-5d049fa905d4-secret-volume\") pod \"collect-profiles-29417280-nnjd6\" (UID: \"4d39c7ff-1dd2-4b57-83f8-5d049fa905d4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29417280-nnjd6" Dec 06 16:00:00 crc kubenswrapper[5003]: I1206 16:00:00.445544 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtzx2\" (UniqueName: \"kubernetes.io/projected/4d39c7ff-1dd2-4b57-83f8-5d049fa905d4-kube-api-access-mtzx2\") pod \"collect-profiles-29417280-nnjd6\" (UID: \"4d39c7ff-1dd2-4b57-83f8-5d049fa905d4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29417280-nnjd6" Dec 06 16:00:00 crc kubenswrapper[5003]: I1206 16:00:00.482741 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29417280-nnjd6" Dec 06 16:00:00 crc kubenswrapper[5003]: I1206 16:00:00.690734 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29417280-nnjd6"] Dec 06 16:00:01 crc kubenswrapper[5003]: I1206 16:00:01.261316 5003 generic.go:334] "Generic (PLEG): container finished" podID="4d39c7ff-1dd2-4b57-83f8-5d049fa905d4" containerID="100541e6ee151f5033ea305a00256728fcf759be12c4b6134f4eb72d30bdca72" exitCode=0 Dec 06 16:00:01 crc kubenswrapper[5003]: I1206 16:00:01.261372 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29417280-nnjd6" event={"ID":"4d39c7ff-1dd2-4b57-83f8-5d049fa905d4","Type":"ContainerDied","Data":"100541e6ee151f5033ea305a00256728fcf759be12c4b6134f4eb72d30bdca72"} Dec 06 16:00:01 crc kubenswrapper[5003]: I1206 16:00:01.261413 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29417280-nnjd6" event={"ID":"4d39c7ff-1dd2-4b57-83f8-5d049fa905d4","Type":"ContainerStarted","Data":"ae9a714ba7d4f1bbe74b733893c37cc81d24a7477ed73f9ead50ec7d7cd40204"} Dec 06 16:00:02 crc kubenswrapper[5003]: I1206 16:00:02.517070 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29417280-nnjd6" Dec 06 16:00:02 crc kubenswrapper[5003]: I1206 16:00:02.653060 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4d39c7ff-1dd2-4b57-83f8-5d049fa905d4-config-volume\") pod \"4d39c7ff-1dd2-4b57-83f8-5d049fa905d4\" (UID: \"4d39c7ff-1dd2-4b57-83f8-5d049fa905d4\") " Dec 06 16:00:02 crc kubenswrapper[5003]: I1206 16:00:02.653146 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4d39c7ff-1dd2-4b57-83f8-5d049fa905d4-secret-volume\") pod \"4d39c7ff-1dd2-4b57-83f8-5d049fa905d4\" (UID: \"4d39c7ff-1dd2-4b57-83f8-5d049fa905d4\") " Dec 06 16:00:02 crc kubenswrapper[5003]: I1206 16:00:02.653263 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mtzx2\" (UniqueName: \"kubernetes.io/projected/4d39c7ff-1dd2-4b57-83f8-5d049fa905d4-kube-api-access-mtzx2\") pod \"4d39c7ff-1dd2-4b57-83f8-5d049fa905d4\" (UID: \"4d39c7ff-1dd2-4b57-83f8-5d049fa905d4\") " Dec 06 16:00:02 crc kubenswrapper[5003]: I1206 16:00:02.654127 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4d39c7ff-1dd2-4b57-83f8-5d049fa905d4-config-volume" (OuterVolumeSpecName: "config-volume") pod "4d39c7ff-1dd2-4b57-83f8-5d049fa905d4" (UID: "4d39c7ff-1dd2-4b57-83f8-5d049fa905d4"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 16:00:02 crc kubenswrapper[5003]: I1206 16:00:02.658890 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d39c7ff-1dd2-4b57-83f8-5d049fa905d4-kube-api-access-mtzx2" (OuterVolumeSpecName: "kube-api-access-mtzx2") pod "4d39c7ff-1dd2-4b57-83f8-5d049fa905d4" (UID: "4d39c7ff-1dd2-4b57-83f8-5d049fa905d4"). InnerVolumeSpecName "kube-api-access-mtzx2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 16:00:02 crc kubenswrapper[5003]: I1206 16:00:02.659022 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d39c7ff-1dd2-4b57-83f8-5d049fa905d4-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "4d39c7ff-1dd2-4b57-83f8-5d049fa905d4" (UID: "4d39c7ff-1dd2-4b57-83f8-5d049fa905d4"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 16:00:02 crc kubenswrapper[5003]: I1206 16:00:02.754453 5003 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4d39c7ff-1dd2-4b57-83f8-5d049fa905d4-config-volume\") on node \"crc\" DevicePath \"\"" Dec 06 16:00:02 crc kubenswrapper[5003]: I1206 16:00:02.754757 5003 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4d39c7ff-1dd2-4b57-83f8-5d049fa905d4-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 06 16:00:02 crc kubenswrapper[5003]: I1206 16:00:02.754775 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mtzx2\" (UniqueName: \"kubernetes.io/projected/4d39c7ff-1dd2-4b57-83f8-5d049fa905d4-kube-api-access-mtzx2\") on node \"crc\" DevicePath \"\"" Dec 06 16:00:03 crc kubenswrapper[5003]: I1206 16:00:03.274745 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29417280-nnjd6" event={"ID":"4d39c7ff-1dd2-4b57-83f8-5d049fa905d4","Type":"ContainerDied","Data":"ae9a714ba7d4f1bbe74b733893c37cc81d24a7477ed73f9ead50ec7d7cd40204"} Dec 06 16:00:03 crc kubenswrapper[5003]: I1206 16:00:03.274789 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ae9a714ba7d4f1bbe74b733893c37cc81d24a7477ed73f9ead50ec7d7cd40204" Dec 06 16:00:03 crc kubenswrapper[5003]: I1206 16:00:03.274823 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29417280-nnjd6" Dec 06 16:00:11 crc kubenswrapper[5003]: I1206 16:00:11.717973 5003 scope.go:117] "RemoveContainer" containerID="5cab9da0a1a459e6915d570312c7512355a1b9744939c5aec7b823b431cc45d8" Dec 06 16:00:11 crc kubenswrapper[5003]: E1206 16:00:11.719014 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w25db_openshift-machine-config-operator(1a047c4d-003e-4668-9b96-945eab34ab68)\"" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" Dec 06 16:00:16 crc kubenswrapper[5003]: I1206 16:00:16.658740 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-x6k88_8e450a8e-52f9-48fe-96c8-8f444a7437fe/control-plane-machine-set-operator/0.log" Dec 06 16:00:16 crc kubenswrapper[5003]: I1206 16:00:16.867940 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-dh4ts_07dcad69-d3a4-40e2-a4d2-e83eb74631d7/kube-rbac-proxy/0.log" Dec 06 16:00:16 crc kubenswrapper[5003]: I1206 16:00:16.901279 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-dh4ts_07dcad69-d3a4-40e2-a4d2-e83eb74631d7/machine-api-operator/0.log" Dec 06 16:00:26 crc kubenswrapper[5003]: I1206 16:00:26.712377 5003 scope.go:117] "RemoveContainer" containerID="5cab9da0a1a459e6915d570312c7512355a1b9744939c5aec7b823b431cc45d8" Dec 06 16:00:26 crc kubenswrapper[5003]: E1206 16:00:26.712898 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-w25db_openshift-machine-config-operator(1a047c4d-003e-4668-9b96-945eab34ab68)\"" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" Dec 06 16:00:31 crc kubenswrapper[5003]: I1206 16:00:31.223137 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-2ddqw_949aaefb-e672-4000-8a50-e943723611ff/kube-rbac-proxy/0.log" Dec 06 16:00:31 crc kubenswrapper[5003]: I1206 16:00:31.265184 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-2ddqw_949aaefb-e672-4000-8a50-e943723611ff/controller/0.log" Dec 06 16:00:31 crc kubenswrapper[5003]: I1206 16:00:31.395255 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fs94m_395612ce-6ba7-4b60-822c-dbae3eea5e7f/cp-frr-files/0.log" Dec 06 16:00:31 crc kubenswrapper[5003]: I1206 16:00:31.548345 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fs94m_395612ce-6ba7-4b60-822c-dbae3eea5e7f/cp-frr-files/0.log" Dec 06 16:00:31 crc kubenswrapper[5003]: I1206 16:00:31.558224 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fs94m_395612ce-6ba7-4b60-822c-dbae3eea5e7f/cp-reloader/0.log" Dec 06 16:00:31 crc kubenswrapper[5003]: I1206 16:00:31.585632 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fs94m_395612ce-6ba7-4b60-822c-dbae3eea5e7f/cp-reloader/0.log" Dec 06 16:00:31 crc kubenswrapper[5003]: I1206 16:00:31.588261 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fs94m_395612ce-6ba7-4b60-822c-dbae3eea5e7f/cp-metrics/0.log" Dec 06 16:00:31 crc kubenswrapper[5003]: I1206 16:00:31.796397 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fs94m_395612ce-6ba7-4b60-822c-dbae3eea5e7f/cp-frr-files/0.log" Dec 06 16:00:31 crc kubenswrapper[5003]: I1206 16:00:31.827962 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fs94m_395612ce-6ba7-4b60-822c-dbae3eea5e7f/cp-metrics/0.log" Dec 06 16:00:31 crc kubenswrapper[5003]: I1206 16:00:31.829873 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fs94m_395612ce-6ba7-4b60-822c-dbae3eea5e7f/cp-metrics/0.log" Dec 06 16:00:31 crc kubenswrapper[5003]: I1206 16:00:31.834310 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fs94m_395612ce-6ba7-4b60-822c-dbae3eea5e7f/cp-reloader/0.log" Dec 06 16:00:31 crc kubenswrapper[5003]: I1206 16:00:31.953887 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fs94m_395612ce-6ba7-4b60-822c-dbae3eea5e7f/cp-frr-files/0.log" Dec 06 16:00:31 crc kubenswrapper[5003]: I1206 16:00:31.994792 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fs94m_395612ce-6ba7-4b60-822c-dbae3eea5e7f/cp-reloader/0.log" Dec 06 16:00:32 crc kubenswrapper[5003]: I1206 16:00:32.004692 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fs94m_395612ce-6ba7-4b60-822c-dbae3eea5e7f/cp-metrics/0.log" Dec 06 16:00:32 crc kubenswrapper[5003]: I1206 16:00:32.004767 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fs94m_395612ce-6ba7-4b60-822c-dbae3eea5e7f/controller/0.log" Dec 06 16:00:32 crc kubenswrapper[5003]: I1206 16:00:32.194042 5003 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-fs94m_395612ce-6ba7-4b60-822c-dbae3eea5e7f/frr-metrics/0.log" Dec 06 16:00:32 crc kubenswrapper[5003]: I1206 16:00:32.208737 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fs94m_395612ce-6ba7-4b60-822c-dbae3eea5e7f/kube-rbac-proxy/0.log" Dec 06 16:00:32 crc kubenswrapper[5003]: I1206 16:00:32.246633 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fs94m_395612ce-6ba7-4b60-822c-dbae3eea5e7f/kube-rbac-proxy-frr/0.log" Dec 06 16:00:32 crc kubenswrapper[5003]: I1206 16:00:32.435794 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fs94m_395612ce-6ba7-4b60-822c-dbae3eea5e7f/reloader/0.log" Dec 06 16:00:32 crc kubenswrapper[5003]: I1206 16:00:32.459956 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fs94m_395612ce-6ba7-4b60-822c-dbae3eea5e7f/frr/0.log" Dec 06 16:00:32 crc kubenswrapper[5003]: I1206 16:00:32.469631 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-2jzmn_8d4e8c3d-e37b-4489-bd61-84af9e792de1/frr-k8s-webhook-server/0.log" Dec 06 16:00:32 crc kubenswrapper[5003]: I1206 16:00:32.629749 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-6fb7b5787c-jtd2n_e1a24ae6-f251-42bc-bb3e-1d9bd03dd13e/manager/0.log" Dec 06 16:00:32 crc kubenswrapper[5003]: I1206 16:00:32.656790 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-576499f99b-6pz7r_49994fd5-c0aa-446a-b546-d3e0acc4fa81/webhook-server/0.log" Dec 06 16:00:32 crc kubenswrapper[5003]: I1206 16:00:32.776022 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-vtb75_50405731-e195-43a8-a231-895b2b19b554/kube-rbac-proxy/0.log" Dec 06 16:00:32 crc kubenswrapper[5003]: I1206 16:00:32.903704 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-vtb75_50405731-e195-43a8-a231-895b2b19b554/speaker/0.log" Dec 06 16:00:38 crc kubenswrapper[5003]: I1206 16:00:38.712453 5003 scope.go:117] "RemoveContainer" containerID="5cab9da0a1a459e6915d570312c7512355a1b9744939c5aec7b823b431cc45d8" Dec 06 16:00:38 crc kubenswrapper[5003]: E1206 16:00:38.713245 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w25db_openshift-machine-config-operator(1a047c4d-003e-4668-9b96-945eab34ab68)\"" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" Dec 06 16:00:51 crc kubenswrapper[5003]: I1206 16:00:51.714524 5003 scope.go:117] "RemoveContainer" containerID="5cab9da0a1a459e6915d570312c7512355a1b9744939c5aec7b823b431cc45d8" Dec 06 16:00:51 crc kubenswrapper[5003]: E1206 16:00:51.715210 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w25db_openshift-machine-config-operator(1a047c4d-003e-4668-9b96-945eab34ab68)\"" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" Dec 06 16:00:55 crc kubenswrapper[5003]: I1206 16:00:55.171457 5003 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn_ccb39b5c-9a0b-4ce9-a83c-a41fda667b92/util/0.log" Dec 06 16:00:55 crc kubenswrapper[5003]: I1206 16:00:55.350370 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn_ccb39b5c-9a0b-4ce9-a83c-a41fda667b92/util/0.log" Dec 06 16:00:55 crc kubenswrapper[5003]: I1206 16:00:55.370927 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn_ccb39b5c-9a0b-4ce9-a83c-a41fda667b92/pull/0.log" Dec 06 16:00:55 crc kubenswrapper[5003]: I1206 16:00:55.391481 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn_ccb39b5c-9a0b-4ce9-a83c-a41fda667b92/pull/0.log" Dec 06 16:00:55 crc kubenswrapper[5003]: I1206 16:00:55.539677 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn_ccb39b5c-9a0b-4ce9-a83c-a41fda667b92/util/0.log" Dec 06 16:00:55 crc kubenswrapper[5003]: I1206 16:00:55.551696 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn_ccb39b5c-9a0b-4ce9-a83c-a41fda667b92/pull/0.log" Dec 06 16:00:55 crc kubenswrapper[5003]: I1206 16:00:55.553178 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gjfvn_ccb39b5c-9a0b-4ce9-a83c-a41fda667b92/extract/0.log" Dec 06 16:00:55 crc kubenswrapper[5003]: I1206 16:00:55.714766 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cvx9j_1b508a70-c3a8-4f75-ae70-38613a4011cb/extract-utilities/0.log" Dec 06 16:00:55 crc kubenswrapper[5003]: I1206 16:00:55.837824 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cvx9j_1b508a70-c3a8-4f75-ae70-38613a4011cb/extract-utilities/0.log" Dec 06 16:00:55 crc kubenswrapper[5003]: I1206 16:00:55.855379 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cvx9j_1b508a70-c3a8-4f75-ae70-38613a4011cb/extract-content/0.log" Dec 06 16:00:55 crc kubenswrapper[5003]: I1206 16:00:55.856699 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cvx9j_1b508a70-c3a8-4f75-ae70-38613a4011cb/extract-content/0.log" Dec 06 16:00:56 crc kubenswrapper[5003]: I1206 16:00:56.051894 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cvx9j_1b508a70-c3a8-4f75-ae70-38613a4011cb/extract-utilities/0.log" Dec 06 16:00:56 crc kubenswrapper[5003]: I1206 16:00:56.054296 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cvx9j_1b508a70-c3a8-4f75-ae70-38613a4011cb/extract-content/0.log" Dec 06 16:00:56 crc kubenswrapper[5003]: I1206 16:00:56.251933 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-brzlr_462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e/extract-utilities/0.log" Dec 06 16:00:56 crc kubenswrapper[5003]: I1206 16:00:56.431135 5003 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_community-operators-brzlr_462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e/extract-utilities/0.log" Dec 06 16:00:56 crc kubenswrapper[5003]: I1206 16:00:56.432902 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cvx9j_1b508a70-c3a8-4f75-ae70-38613a4011cb/registry-server/0.log" Dec 06 16:00:56 crc kubenswrapper[5003]: I1206 16:00:56.455116 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-brzlr_462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e/extract-content/0.log" Dec 06 16:00:56 crc kubenswrapper[5003]: I1206 16:00:56.487888 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-brzlr_462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e/extract-content/0.log" Dec 06 16:00:56 crc kubenswrapper[5003]: I1206 16:00:56.567980 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-brzlr_462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e/extract-utilities/0.log" Dec 06 16:00:56 crc kubenswrapper[5003]: I1206 16:00:56.599339 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-brzlr_462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e/extract-content/0.log" Dec 06 16:00:56 crc kubenswrapper[5003]: I1206 16:00:56.771251 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-5fqrn_4ae558d3-8724-4da4-bd37-89893945a2f3/marketplace-operator/0.log" Dec 06 16:00:56 crc kubenswrapper[5003]: I1206 16:00:56.904772 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-n6k6n_c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4/extract-utilities/0.log" Dec 06 16:00:57 crc kubenswrapper[5003]: I1206 16:00:57.012129 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-n6k6n_c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4/extract-utilities/0.log" Dec 06 16:00:57 crc kubenswrapper[5003]: I1206 16:00:57.133155 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-n6k6n_c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4/extract-content/0.log" Dec 06 16:00:57 crc kubenswrapper[5003]: I1206 16:00:57.137952 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-brzlr_462e91e7-2b3b-4fd2-bbb1-94b4a727fe1e/registry-server/0.log" Dec 06 16:00:57 crc kubenswrapper[5003]: I1206 16:00:57.172271 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-n6k6n_c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4/extract-content/0.log" Dec 06 16:00:57 crc kubenswrapper[5003]: I1206 16:00:57.302697 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-n6k6n_c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4/extract-content/0.log" Dec 06 16:00:57 crc kubenswrapper[5003]: I1206 16:00:57.314011 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-n6k6n_c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4/extract-utilities/0.log" Dec 06 16:00:57 crc kubenswrapper[5003]: I1206 16:00:57.432679 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-n6k6n_c4c46a65-2b6a-413a-9dd9-5aaa2d2041f4/registry-server/0.log" Dec 06 16:00:57 crc kubenswrapper[5003]: I1206 16:00:57.477810 5003 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-operators-4f6q7_ebf945ad-37bf-4837-8fce-af8b8634c82f/extract-utilities/0.log" Dec 06 16:00:57 crc kubenswrapper[5003]: I1206 16:00:57.657449 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-4f6q7_ebf945ad-37bf-4837-8fce-af8b8634c82f/extract-utilities/0.log" Dec 06 16:00:57 crc kubenswrapper[5003]: I1206 16:00:57.657861 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-4f6q7_ebf945ad-37bf-4837-8fce-af8b8634c82f/extract-content/0.log" Dec 06 16:00:57 crc kubenswrapper[5003]: I1206 16:00:57.695363 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-4f6q7_ebf945ad-37bf-4837-8fce-af8b8634c82f/extract-content/0.log" Dec 06 16:00:57 crc kubenswrapper[5003]: I1206 16:00:57.873433 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-4f6q7_ebf945ad-37bf-4837-8fce-af8b8634c82f/extract-utilities/0.log" Dec 06 16:00:57 crc kubenswrapper[5003]: I1206 16:00:57.894278 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-4f6q7_ebf945ad-37bf-4837-8fce-af8b8634c82f/extract-content/0.log" Dec 06 16:00:58 crc kubenswrapper[5003]: I1206 16:00:58.196978 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-4f6q7_ebf945ad-37bf-4837-8fce-af8b8634c82f/registry-server/0.log" Dec 06 16:01:04 crc kubenswrapper[5003]: I1206 16:01:04.712898 5003 scope.go:117] "RemoveContainer" containerID="5cab9da0a1a459e6915d570312c7512355a1b9744939c5aec7b823b431cc45d8" Dec 06 16:01:04 crc kubenswrapper[5003]: E1206 16:01:04.713840 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w25db_openshift-machine-config-operator(1a047c4d-003e-4668-9b96-945eab34ab68)\"" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" Dec 06 16:01:15 crc kubenswrapper[5003]: I1206 16:01:15.713016 5003 scope.go:117] "RemoveContainer" containerID="5cab9da0a1a459e6915d570312c7512355a1b9744939c5aec7b823b431cc45d8" Dec 06 16:01:15 crc kubenswrapper[5003]: E1206 16:01:15.714049 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w25db_openshift-machine-config-operator(1a047c4d-003e-4668-9b96-945eab34ab68)\"" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" Dec 06 16:01:28 crc kubenswrapper[5003]: I1206 16:01:28.712759 5003 scope.go:117] "RemoveContainer" containerID="5cab9da0a1a459e6915d570312c7512355a1b9744939c5aec7b823b431cc45d8" Dec 06 16:01:28 crc kubenswrapper[5003]: E1206 16:01:28.714083 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w25db_openshift-machine-config-operator(1a047c4d-003e-4668-9b96-945eab34ab68)\"" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" Dec 06 16:01:39 
crc kubenswrapper[5003]: I1206 16:01:39.712796 5003 scope.go:117] "RemoveContainer" containerID="5cab9da0a1a459e6915d570312c7512355a1b9744939c5aec7b823b431cc45d8" Dec 06 16:01:39 crc kubenswrapper[5003]: E1206 16:01:39.713610 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w25db_openshift-machine-config-operator(1a047c4d-003e-4668-9b96-945eab34ab68)\"" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" Dec 06 16:01:50 crc kubenswrapper[5003]: I1206 16:01:50.712103 5003 scope.go:117] "RemoveContainer" containerID="5cab9da0a1a459e6915d570312c7512355a1b9744939c5aec7b823b431cc45d8" Dec 06 16:01:50 crc kubenswrapper[5003]: E1206 16:01:50.712905 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w25db_openshift-machine-config-operator(1a047c4d-003e-4668-9b96-945eab34ab68)\"" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" Dec 06 16:02:04 crc kubenswrapper[5003]: I1206 16:02:04.633797 5003 generic.go:334] "Generic (PLEG): container finished" podID="148cb75b-a947-408a-bfa1-0dd81161f43b" containerID="0f46d7d349341a206352e3716b0f3eeae110c16f75d20157b96ed6d898e8a5ab" exitCode=0 Dec 06 16:02:04 crc kubenswrapper[5003]: I1206 16:02:04.633946 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-lfx5x/must-gather-s2984" event={"ID":"148cb75b-a947-408a-bfa1-0dd81161f43b","Type":"ContainerDied","Data":"0f46d7d349341a206352e3716b0f3eeae110c16f75d20157b96ed6d898e8a5ab"} Dec 06 16:02:04 crc kubenswrapper[5003]: I1206 16:02:04.634723 5003 scope.go:117] "RemoveContainer" containerID="0f46d7d349341a206352e3716b0f3eeae110c16f75d20157b96ed6d898e8a5ab" Dec 06 16:02:05 crc kubenswrapper[5003]: I1206 16:02:05.462790 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-lfx5x_must-gather-s2984_148cb75b-a947-408a-bfa1-0dd81161f43b/gather/0.log" Dec 06 16:02:05 crc kubenswrapper[5003]: I1206 16:02:05.711876 5003 scope.go:117] "RemoveContainer" containerID="5cab9da0a1a459e6915d570312c7512355a1b9744939c5aec7b823b431cc45d8" Dec 06 16:02:05 crc kubenswrapper[5003]: E1206 16:02:05.712085 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w25db_openshift-machine-config-operator(1a047c4d-003e-4668-9b96-945eab34ab68)\"" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" Dec 06 16:02:15 crc kubenswrapper[5003]: I1206 16:02:15.206827 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-lfx5x/must-gather-s2984"] Dec 06 16:02:15 crc kubenswrapper[5003]: I1206 16:02:15.207465 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-lfx5x/must-gather-s2984" podUID="148cb75b-a947-408a-bfa1-0dd81161f43b" containerName="copy" containerID="cri-o://1ae5685ee45d6f9bd40604b408b35342740b2f743a3b82a789534cab2c4b0946" gracePeriod=2 Dec 06 16:02:15 crc kubenswrapper[5003]: I1206 
16:02:15.217176 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-lfx5x/must-gather-s2984"] Dec 06 16:02:15 crc kubenswrapper[5003]: I1206 16:02:15.523453 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-lfx5x_must-gather-s2984_148cb75b-a947-408a-bfa1-0dd81161f43b/copy/0.log" Dec 06 16:02:15 crc kubenswrapper[5003]: I1206 16:02:15.524350 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-lfx5x/must-gather-s2984" Dec 06 16:02:15 crc kubenswrapper[5003]: I1206 16:02:15.549393 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dktb4\" (UniqueName: \"kubernetes.io/projected/148cb75b-a947-408a-bfa1-0dd81161f43b-kube-api-access-dktb4\") pod \"148cb75b-a947-408a-bfa1-0dd81161f43b\" (UID: \"148cb75b-a947-408a-bfa1-0dd81161f43b\") " Dec 06 16:02:15 crc kubenswrapper[5003]: I1206 16:02:15.549572 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/148cb75b-a947-408a-bfa1-0dd81161f43b-must-gather-output\") pod \"148cb75b-a947-408a-bfa1-0dd81161f43b\" (UID: \"148cb75b-a947-408a-bfa1-0dd81161f43b\") " Dec 06 16:02:15 crc kubenswrapper[5003]: I1206 16:02:15.555014 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/148cb75b-a947-408a-bfa1-0dd81161f43b-kube-api-access-dktb4" (OuterVolumeSpecName: "kube-api-access-dktb4") pod "148cb75b-a947-408a-bfa1-0dd81161f43b" (UID: "148cb75b-a947-408a-bfa1-0dd81161f43b"). InnerVolumeSpecName "kube-api-access-dktb4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 16:02:15 crc kubenswrapper[5003]: I1206 16:02:15.617477 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/148cb75b-a947-408a-bfa1-0dd81161f43b-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "148cb75b-a947-408a-bfa1-0dd81161f43b" (UID: "148cb75b-a947-408a-bfa1-0dd81161f43b"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 16:02:15 crc kubenswrapper[5003]: I1206 16:02:15.651077 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dktb4\" (UniqueName: \"kubernetes.io/projected/148cb75b-a947-408a-bfa1-0dd81161f43b-kube-api-access-dktb4\") on node \"crc\" DevicePath \"\"" Dec 06 16:02:15 crc kubenswrapper[5003]: I1206 16:02:15.651116 5003 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/148cb75b-a947-408a-bfa1-0dd81161f43b-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 06 16:02:15 crc kubenswrapper[5003]: I1206 16:02:15.701066 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-lfx5x_must-gather-s2984_148cb75b-a947-408a-bfa1-0dd81161f43b/copy/0.log" Dec 06 16:02:15 crc kubenswrapper[5003]: I1206 16:02:15.701538 5003 generic.go:334] "Generic (PLEG): container finished" podID="148cb75b-a947-408a-bfa1-0dd81161f43b" containerID="1ae5685ee45d6f9bd40604b408b35342740b2f743a3b82a789534cab2c4b0946" exitCode=143 Dec 06 16:02:15 crc kubenswrapper[5003]: I1206 16:02:15.701587 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-lfx5x/must-gather-s2984" Dec 06 16:02:15 crc kubenswrapper[5003]: I1206 16:02:15.701625 5003 scope.go:117] "RemoveContainer" containerID="1ae5685ee45d6f9bd40604b408b35342740b2f743a3b82a789534cab2c4b0946" Dec 06 16:02:15 crc kubenswrapper[5003]: I1206 16:02:15.717199 5003 scope.go:117] "RemoveContainer" containerID="0f46d7d349341a206352e3716b0f3eeae110c16f75d20157b96ed6d898e8a5ab" Dec 06 16:02:15 crc kubenswrapper[5003]: I1206 16:02:15.723627 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="148cb75b-a947-408a-bfa1-0dd81161f43b" path="/var/lib/kubelet/pods/148cb75b-a947-408a-bfa1-0dd81161f43b/volumes" Dec 06 16:02:15 crc kubenswrapper[5003]: I1206 16:02:15.749223 5003 scope.go:117] "RemoveContainer" containerID="1ae5685ee45d6f9bd40604b408b35342740b2f743a3b82a789534cab2c4b0946" Dec 06 16:02:15 crc kubenswrapper[5003]: E1206 16:02:15.749790 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ae5685ee45d6f9bd40604b408b35342740b2f743a3b82a789534cab2c4b0946\": container with ID starting with 1ae5685ee45d6f9bd40604b408b35342740b2f743a3b82a789534cab2c4b0946 not found: ID does not exist" containerID="1ae5685ee45d6f9bd40604b408b35342740b2f743a3b82a789534cab2c4b0946" Dec 06 16:02:15 crc kubenswrapper[5003]: I1206 16:02:15.749938 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ae5685ee45d6f9bd40604b408b35342740b2f743a3b82a789534cab2c4b0946"} err="failed to get container status \"1ae5685ee45d6f9bd40604b408b35342740b2f743a3b82a789534cab2c4b0946\": rpc error: code = NotFound desc = could not find container \"1ae5685ee45d6f9bd40604b408b35342740b2f743a3b82a789534cab2c4b0946\": container with ID starting with 1ae5685ee45d6f9bd40604b408b35342740b2f743a3b82a789534cab2c4b0946 not found: ID does not exist" Dec 06 16:02:15 crc kubenswrapper[5003]: I1206 16:02:15.750060 5003 scope.go:117] "RemoveContainer" containerID="0f46d7d349341a206352e3716b0f3eeae110c16f75d20157b96ed6d898e8a5ab" Dec 06 16:02:15 crc kubenswrapper[5003]: E1206 16:02:15.750564 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f46d7d349341a206352e3716b0f3eeae110c16f75d20157b96ed6d898e8a5ab\": container with ID starting with 0f46d7d349341a206352e3716b0f3eeae110c16f75d20157b96ed6d898e8a5ab not found: ID does not exist" containerID="0f46d7d349341a206352e3716b0f3eeae110c16f75d20157b96ed6d898e8a5ab" Dec 06 16:02:15 crc kubenswrapper[5003]: I1206 16:02:15.750628 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f46d7d349341a206352e3716b0f3eeae110c16f75d20157b96ed6d898e8a5ab"} err="failed to get container status \"0f46d7d349341a206352e3716b0f3eeae110c16f75d20157b96ed6d898e8a5ab\": rpc error: code = NotFound desc = could not find container \"0f46d7d349341a206352e3716b0f3eeae110c16f75d20157b96ed6d898e8a5ab\": container with ID starting with 0f46d7d349341a206352e3716b0f3eeae110c16f75d20157b96ed6d898e8a5ab not found: ID does not exist" Dec 06 16:02:20 crc kubenswrapper[5003]: I1206 16:02:20.712374 5003 scope.go:117] "RemoveContainer" containerID="5cab9da0a1a459e6915d570312c7512355a1b9744939c5aec7b823b431cc45d8" Dec 06 16:02:21 crc kubenswrapper[5003]: I1206 16:02:21.743171 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" 
event={"ID":"1a047c4d-003e-4668-9b96-945eab34ab68","Type":"ContainerStarted","Data":"0da04a93955c8df3b7b7c5985c028b011790f5735ee77e2295ce70d645e57ab5"} Dec 06 16:03:33 crc kubenswrapper[5003]: I1206 16:03:33.091676 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-nzqv7"] Dec 06 16:03:33 crc kubenswrapper[5003]: E1206 16:03:33.093770 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d39c7ff-1dd2-4b57-83f8-5d049fa905d4" containerName="collect-profiles" Dec 06 16:03:33 crc kubenswrapper[5003]: I1206 16:03:33.093864 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d39c7ff-1dd2-4b57-83f8-5d049fa905d4" containerName="collect-profiles" Dec 06 16:03:33 crc kubenswrapper[5003]: E1206 16:03:33.093946 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="148cb75b-a947-408a-bfa1-0dd81161f43b" containerName="gather" Dec 06 16:03:33 crc kubenswrapper[5003]: I1206 16:03:33.094014 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="148cb75b-a947-408a-bfa1-0dd81161f43b" containerName="gather" Dec 06 16:03:33 crc kubenswrapper[5003]: E1206 16:03:33.094087 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="148cb75b-a947-408a-bfa1-0dd81161f43b" containerName="copy" Dec 06 16:03:33 crc kubenswrapper[5003]: I1206 16:03:33.094147 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="148cb75b-a947-408a-bfa1-0dd81161f43b" containerName="copy" Dec 06 16:03:33 crc kubenswrapper[5003]: I1206 16:03:33.094299 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d39c7ff-1dd2-4b57-83f8-5d049fa905d4" containerName="collect-profiles" Dec 06 16:03:33 crc kubenswrapper[5003]: I1206 16:03:33.094400 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="148cb75b-a947-408a-bfa1-0dd81161f43b" containerName="gather" Dec 06 16:03:33 crc kubenswrapper[5003]: I1206 16:03:33.094472 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="148cb75b-a947-408a-bfa1-0dd81161f43b" containerName="copy" Dec 06 16:03:33 crc kubenswrapper[5003]: I1206 16:03:33.095601 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nzqv7" Dec 06 16:03:33 crc kubenswrapper[5003]: I1206 16:03:33.101148 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nzqv7"] Dec 06 16:03:33 crc kubenswrapper[5003]: I1206 16:03:33.147180 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f3d5409-2e3d-4c16-8d73-05463b3b3782-utilities\") pod \"redhat-marketplace-nzqv7\" (UID: \"2f3d5409-2e3d-4c16-8d73-05463b3b3782\") " pod="openshift-marketplace/redhat-marketplace-nzqv7" Dec 06 16:03:33 crc kubenswrapper[5003]: I1206 16:03:33.147236 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnskr\" (UniqueName: \"kubernetes.io/projected/2f3d5409-2e3d-4c16-8d73-05463b3b3782-kube-api-access-rnskr\") pod \"redhat-marketplace-nzqv7\" (UID: \"2f3d5409-2e3d-4c16-8d73-05463b3b3782\") " pod="openshift-marketplace/redhat-marketplace-nzqv7" Dec 06 16:03:33 crc kubenswrapper[5003]: I1206 16:03:33.147284 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f3d5409-2e3d-4c16-8d73-05463b3b3782-catalog-content\") pod \"redhat-marketplace-nzqv7\" (UID: \"2f3d5409-2e3d-4c16-8d73-05463b3b3782\") " pod="openshift-marketplace/redhat-marketplace-nzqv7" Dec 06 16:03:33 crc kubenswrapper[5003]: I1206 16:03:33.248677 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f3d5409-2e3d-4c16-8d73-05463b3b3782-utilities\") pod \"redhat-marketplace-nzqv7\" (UID: \"2f3d5409-2e3d-4c16-8d73-05463b3b3782\") " pod="openshift-marketplace/redhat-marketplace-nzqv7" Dec 06 16:03:33 crc kubenswrapper[5003]: I1206 16:03:33.248724 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnskr\" (UniqueName: \"kubernetes.io/projected/2f3d5409-2e3d-4c16-8d73-05463b3b3782-kube-api-access-rnskr\") pod \"redhat-marketplace-nzqv7\" (UID: \"2f3d5409-2e3d-4c16-8d73-05463b3b3782\") " pod="openshift-marketplace/redhat-marketplace-nzqv7" Dec 06 16:03:33 crc kubenswrapper[5003]: I1206 16:03:33.248760 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f3d5409-2e3d-4c16-8d73-05463b3b3782-catalog-content\") pod \"redhat-marketplace-nzqv7\" (UID: \"2f3d5409-2e3d-4c16-8d73-05463b3b3782\") " pod="openshift-marketplace/redhat-marketplace-nzqv7" Dec 06 16:03:33 crc kubenswrapper[5003]: I1206 16:03:33.249197 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f3d5409-2e3d-4c16-8d73-05463b3b3782-utilities\") pod \"redhat-marketplace-nzqv7\" (UID: \"2f3d5409-2e3d-4c16-8d73-05463b3b3782\") " pod="openshift-marketplace/redhat-marketplace-nzqv7" Dec 06 16:03:33 crc kubenswrapper[5003]: I1206 16:03:33.249232 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f3d5409-2e3d-4c16-8d73-05463b3b3782-catalog-content\") pod \"redhat-marketplace-nzqv7\" (UID: \"2f3d5409-2e3d-4c16-8d73-05463b3b3782\") " pod="openshift-marketplace/redhat-marketplace-nzqv7" Dec 06 16:03:33 crc kubenswrapper[5003]: I1206 16:03:33.269705 5003 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-rnskr\" (UniqueName: \"kubernetes.io/projected/2f3d5409-2e3d-4c16-8d73-05463b3b3782-kube-api-access-rnskr\") pod \"redhat-marketplace-nzqv7\" (UID: \"2f3d5409-2e3d-4c16-8d73-05463b3b3782\") " pod="openshift-marketplace/redhat-marketplace-nzqv7" Dec 06 16:03:33 crc kubenswrapper[5003]: I1206 16:03:33.416571 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nzqv7" Dec 06 16:03:33 crc kubenswrapper[5003]: I1206 16:03:33.624420 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nzqv7"] Dec 06 16:03:33 crc kubenswrapper[5003]: W1206 16:03:33.629716 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f3d5409_2e3d_4c16_8d73_05463b3b3782.slice/crio-661b26a9559c5101a55c64ba174733b52f2ebf6ea2e0e5cc41afe7201ec55084 WatchSource:0}: Error finding container 661b26a9559c5101a55c64ba174733b52f2ebf6ea2e0e5cc41afe7201ec55084: Status 404 returned error can't find the container with id 661b26a9559c5101a55c64ba174733b52f2ebf6ea2e0e5cc41afe7201ec55084 Dec 06 16:03:34 crc kubenswrapper[5003]: I1206 16:03:34.184645 5003 generic.go:334] "Generic (PLEG): container finished" podID="2f3d5409-2e3d-4c16-8d73-05463b3b3782" containerID="3a7c4b0644f3ffe6609410b41a8f27ef4bcab9d6f86db832e3390db2c61bbd17" exitCode=0 Dec 06 16:03:34 crc kubenswrapper[5003]: I1206 16:03:34.184709 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nzqv7" event={"ID":"2f3d5409-2e3d-4c16-8d73-05463b3b3782","Type":"ContainerDied","Data":"3a7c4b0644f3ffe6609410b41a8f27ef4bcab9d6f86db832e3390db2c61bbd17"} Dec 06 16:03:34 crc kubenswrapper[5003]: I1206 16:03:34.184947 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nzqv7" event={"ID":"2f3d5409-2e3d-4c16-8d73-05463b3b3782","Type":"ContainerStarted","Data":"661b26a9559c5101a55c64ba174733b52f2ebf6ea2e0e5cc41afe7201ec55084"} Dec 06 16:03:34 crc kubenswrapper[5003]: I1206 16:03:34.187778 5003 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 06 16:03:35 crc kubenswrapper[5003]: I1206 16:03:35.193062 5003 generic.go:334] "Generic (PLEG): container finished" podID="2f3d5409-2e3d-4c16-8d73-05463b3b3782" containerID="d51a13010dcbe9b3edf57c1c5d8fc709d7c6cf0b060ce959e3fef5afdb5c6b8c" exitCode=0 Dec 06 16:03:35 crc kubenswrapper[5003]: I1206 16:03:35.193102 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nzqv7" event={"ID":"2f3d5409-2e3d-4c16-8d73-05463b3b3782","Type":"ContainerDied","Data":"d51a13010dcbe9b3edf57c1c5d8fc709d7c6cf0b060ce959e3fef5afdb5c6b8c"} Dec 06 16:03:36 crc kubenswrapper[5003]: I1206 16:03:36.203174 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nzqv7" event={"ID":"2f3d5409-2e3d-4c16-8d73-05463b3b3782","Type":"ContainerStarted","Data":"61356ad9581552114e3bba035febfc16651e3ed9b823d58c811324202ef3910f"} Dec 06 16:03:36 crc kubenswrapper[5003]: I1206 16:03:36.222563 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-nzqv7" podStartSLOduration=1.797064532 podStartE2EDuration="3.222536997s" podCreationTimestamp="2025-12-06 16:03:33 +0000 UTC" firstStartedPulling="2025-12-06 16:03:34.187392401 +0000 UTC m=+1892.720746802" 
lastFinishedPulling="2025-12-06 16:03:35.612864896 +0000 UTC m=+1894.146219267" observedRunningTime="2025-12-06 16:03:36.218914497 +0000 UTC m=+1894.752268958" watchObservedRunningTime="2025-12-06 16:03:36.222536997 +0000 UTC m=+1894.755891408" Dec 06 16:03:43 crc kubenswrapper[5003]: I1206 16:03:43.417823 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-nzqv7" Dec 06 16:03:43 crc kubenswrapper[5003]: I1206 16:03:43.418524 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-nzqv7" Dec 06 16:03:43 crc kubenswrapper[5003]: I1206 16:03:43.467516 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-nzqv7" Dec 06 16:03:44 crc kubenswrapper[5003]: I1206 16:03:44.327290 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-nzqv7" Dec 06 16:03:44 crc kubenswrapper[5003]: I1206 16:03:44.377140 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nzqv7"] Dec 06 16:03:46 crc kubenswrapper[5003]: I1206 16:03:46.276467 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-nzqv7" podUID="2f3d5409-2e3d-4c16-8d73-05463b3b3782" containerName="registry-server" containerID="cri-o://61356ad9581552114e3bba035febfc16651e3ed9b823d58c811324202ef3910f" gracePeriod=2 Dec 06 16:03:47 crc kubenswrapper[5003]: I1206 16:03:47.262818 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nzqv7" Dec 06 16:03:47 crc kubenswrapper[5003]: I1206 16:03:47.287472 5003 generic.go:334] "Generic (PLEG): container finished" podID="2f3d5409-2e3d-4c16-8d73-05463b3b3782" containerID="61356ad9581552114e3bba035febfc16651e3ed9b823d58c811324202ef3910f" exitCode=0 Dec 06 16:03:47 crc kubenswrapper[5003]: I1206 16:03:47.287564 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nzqv7" event={"ID":"2f3d5409-2e3d-4c16-8d73-05463b3b3782","Type":"ContainerDied","Data":"61356ad9581552114e3bba035febfc16651e3ed9b823d58c811324202ef3910f"} Dec 06 16:03:47 crc kubenswrapper[5003]: I1206 16:03:47.287614 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nzqv7" event={"ID":"2f3d5409-2e3d-4c16-8d73-05463b3b3782","Type":"ContainerDied","Data":"661b26a9559c5101a55c64ba174733b52f2ebf6ea2e0e5cc41afe7201ec55084"} Dec 06 16:03:47 crc kubenswrapper[5003]: I1206 16:03:47.287641 5003 scope.go:117] "RemoveContainer" containerID="61356ad9581552114e3bba035febfc16651e3ed9b823d58c811324202ef3910f" Dec 06 16:03:47 crc kubenswrapper[5003]: I1206 16:03:47.287638 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nzqv7" Dec 06 16:03:47 crc kubenswrapper[5003]: I1206 16:03:47.312628 5003 scope.go:117] "RemoveContainer" containerID="d51a13010dcbe9b3edf57c1c5d8fc709d7c6cf0b060ce959e3fef5afdb5c6b8c" Dec 06 16:03:47 crc kubenswrapper[5003]: I1206 16:03:47.329767 5003 scope.go:117] "RemoveContainer" containerID="3a7c4b0644f3ffe6609410b41a8f27ef4bcab9d6f86db832e3390db2c61bbd17" Dec 06 16:03:47 crc kubenswrapper[5003]: I1206 16:03:47.345414 5003 scope.go:117] "RemoveContainer" containerID="61356ad9581552114e3bba035febfc16651e3ed9b823d58c811324202ef3910f" Dec 06 16:03:47 crc kubenswrapper[5003]: E1206 16:03:47.345878 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"61356ad9581552114e3bba035febfc16651e3ed9b823d58c811324202ef3910f\": container with ID starting with 61356ad9581552114e3bba035febfc16651e3ed9b823d58c811324202ef3910f not found: ID does not exist" containerID="61356ad9581552114e3bba035febfc16651e3ed9b823d58c811324202ef3910f" Dec 06 16:03:47 crc kubenswrapper[5003]: I1206 16:03:47.345919 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61356ad9581552114e3bba035febfc16651e3ed9b823d58c811324202ef3910f"} err="failed to get container status \"61356ad9581552114e3bba035febfc16651e3ed9b823d58c811324202ef3910f\": rpc error: code = NotFound desc = could not find container \"61356ad9581552114e3bba035febfc16651e3ed9b823d58c811324202ef3910f\": container with ID starting with 61356ad9581552114e3bba035febfc16651e3ed9b823d58c811324202ef3910f not found: ID does not exist" Dec 06 16:03:47 crc kubenswrapper[5003]: I1206 16:03:47.345947 5003 scope.go:117] "RemoveContainer" containerID="d51a13010dcbe9b3edf57c1c5d8fc709d7c6cf0b060ce959e3fef5afdb5c6b8c" Dec 06 16:03:47 crc kubenswrapper[5003]: E1206 16:03:47.346274 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d51a13010dcbe9b3edf57c1c5d8fc709d7c6cf0b060ce959e3fef5afdb5c6b8c\": container with ID starting with d51a13010dcbe9b3edf57c1c5d8fc709d7c6cf0b060ce959e3fef5afdb5c6b8c not found: ID does not exist" containerID="d51a13010dcbe9b3edf57c1c5d8fc709d7c6cf0b060ce959e3fef5afdb5c6b8c" Dec 06 16:03:47 crc kubenswrapper[5003]: I1206 16:03:47.346303 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d51a13010dcbe9b3edf57c1c5d8fc709d7c6cf0b060ce959e3fef5afdb5c6b8c"} err="failed to get container status \"d51a13010dcbe9b3edf57c1c5d8fc709d7c6cf0b060ce959e3fef5afdb5c6b8c\": rpc error: code = NotFound desc = could not find container \"d51a13010dcbe9b3edf57c1c5d8fc709d7c6cf0b060ce959e3fef5afdb5c6b8c\": container with ID starting with d51a13010dcbe9b3edf57c1c5d8fc709d7c6cf0b060ce959e3fef5afdb5c6b8c not found: ID does not exist" Dec 06 16:03:47 crc kubenswrapper[5003]: I1206 16:03:47.346321 5003 scope.go:117] "RemoveContainer" containerID="3a7c4b0644f3ffe6609410b41a8f27ef4bcab9d6f86db832e3390db2c61bbd17" Dec 06 16:03:47 crc kubenswrapper[5003]: E1206 16:03:47.346591 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a7c4b0644f3ffe6609410b41a8f27ef4bcab9d6f86db832e3390db2c61bbd17\": container with ID starting with 3a7c4b0644f3ffe6609410b41a8f27ef4bcab9d6f86db832e3390db2c61bbd17 not found: ID does not exist" containerID="3a7c4b0644f3ffe6609410b41a8f27ef4bcab9d6f86db832e3390db2c61bbd17" 
Dec 06 16:03:47 crc kubenswrapper[5003]: I1206 16:03:47.346611 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a7c4b0644f3ffe6609410b41a8f27ef4bcab9d6f86db832e3390db2c61bbd17"} err="failed to get container status \"3a7c4b0644f3ffe6609410b41a8f27ef4bcab9d6f86db832e3390db2c61bbd17\": rpc error: code = NotFound desc = could not find container \"3a7c4b0644f3ffe6609410b41a8f27ef4bcab9d6f86db832e3390db2c61bbd17\": container with ID starting with 3a7c4b0644f3ffe6609410b41a8f27ef4bcab9d6f86db832e3390db2c61bbd17 not found: ID does not exist" Dec 06 16:03:47 crc kubenswrapper[5003]: I1206 16:03:47.447238 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f3d5409-2e3d-4c16-8d73-05463b3b3782-catalog-content\") pod \"2f3d5409-2e3d-4c16-8d73-05463b3b3782\" (UID: \"2f3d5409-2e3d-4c16-8d73-05463b3b3782\") " Dec 06 16:03:47 crc kubenswrapper[5003]: I1206 16:03:47.447382 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnskr\" (UniqueName: \"kubernetes.io/projected/2f3d5409-2e3d-4c16-8d73-05463b3b3782-kube-api-access-rnskr\") pod \"2f3d5409-2e3d-4c16-8d73-05463b3b3782\" (UID: \"2f3d5409-2e3d-4c16-8d73-05463b3b3782\") " Dec 06 16:03:47 crc kubenswrapper[5003]: I1206 16:03:47.447432 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f3d5409-2e3d-4c16-8d73-05463b3b3782-utilities\") pod \"2f3d5409-2e3d-4c16-8d73-05463b3b3782\" (UID: \"2f3d5409-2e3d-4c16-8d73-05463b3b3782\") " Dec 06 16:03:47 crc kubenswrapper[5003]: I1206 16:03:47.448694 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f3d5409-2e3d-4c16-8d73-05463b3b3782-utilities" (OuterVolumeSpecName: "utilities") pod "2f3d5409-2e3d-4c16-8d73-05463b3b3782" (UID: "2f3d5409-2e3d-4c16-8d73-05463b3b3782"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 16:03:47 crc kubenswrapper[5003]: I1206 16:03:47.456619 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f3d5409-2e3d-4c16-8d73-05463b3b3782-kube-api-access-rnskr" (OuterVolumeSpecName: "kube-api-access-rnskr") pod "2f3d5409-2e3d-4c16-8d73-05463b3b3782" (UID: "2f3d5409-2e3d-4c16-8d73-05463b3b3782"). InnerVolumeSpecName "kube-api-access-rnskr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 16:03:47 crc kubenswrapper[5003]: I1206 16:03:47.489373 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f3d5409-2e3d-4c16-8d73-05463b3b3782-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2f3d5409-2e3d-4c16-8d73-05463b3b3782" (UID: "2f3d5409-2e3d-4c16-8d73-05463b3b3782"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 16:03:47 crc kubenswrapper[5003]: I1206 16:03:47.548853 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f3d5409-2e3d-4c16-8d73-05463b3b3782-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 16:03:47 crc kubenswrapper[5003]: I1206 16:03:47.548884 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnskr\" (UniqueName: \"kubernetes.io/projected/2f3d5409-2e3d-4c16-8d73-05463b3b3782-kube-api-access-rnskr\") on node \"crc\" DevicePath \"\"" Dec 06 16:03:47 crc kubenswrapper[5003]: I1206 16:03:47.548896 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f3d5409-2e3d-4c16-8d73-05463b3b3782-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 16:03:47 crc kubenswrapper[5003]: I1206 16:03:47.621617 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nzqv7"] Dec 06 16:03:47 crc kubenswrapper[5003]: I1206 16:03:47.628997 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-nzqv7"] Dec 06 16:03:47 crc kubenswrapper[5003]: I1206 16:03:47.721323 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f3d5409-2e3d-4c16-8d73-05463b3b3782" path="/var/lib/kubelet/pods/2f3d5409-2e3d-4c16-8d73-05463b3b3782/volumes" Dec 06 16:04:48 crc kubenswrapper[5003]: I1206 16:04:48.572385 5003 patch_prober.go:28] interesting pod/machine-config-daemon-w25db container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 16:04:48 crc kubenswrapper[5003]: I1206 16:04:48.576723 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 16:05:18 crc kubenswrapper[5003]: I1206 16:05:18.572969 5003 patch_prober.go:28] interesting pod/machine-config-daemon-w25db container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 16:05:18 crc kubenswrapper[5003]: I1206 16:05:18.573425 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 16:05:48 crc kubenswrapper[5003]: I1206 16:05:48.573143 5003 patch_prober.go:28] interesting pod/machine-config-daemon-w25db container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 16:05:48 crc kubenswrapper[5003]: I1206 16:05:48.573670 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 16:05:48 crc kubenswrapper[5003]: I1206 16:05:48.573724 5003 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w25db" Dec 06 16:05:48 crc kubenswrapper[5003]: I1206 16:05:48.574359 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0da04a93955c8df3b7b7c5985c028b011790f5735ee77e2295ce70d645e57ab5"} pod="openshift-machine-config-operator/machine-config-daemon-w25db" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 06 16:05:48 crc kubenswrapper[5003]: I1206 16:05:48.574413 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w25db" podUID="1a047c4d-003e-4668-9b96-945eab34ab68" containerName="machine-config-daemon" containerID="cri-o://0da04a93955c8df3b7b7c5985c028b011790f5735ee77e2295ce70d645e57ab5" gracePeriod=600 Dec 06 16:05:49 crc kubenswrapper[5003]: I1206 16:05:49.161472 5003 generic.go:334] "Generic (PLEG): container finished" podID="1a047c4d-003e-4668-9b96-945eab34ab68" containerID="0da04a93955c8df3b7b7c5985c028b011790f5735ee77e2295ce70d645e57ab5" exitCode=0 Dec 06 16:05:49 crc kubenswrapper[5003]: I1206 16:05:49.161817 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" event={"ID":"1a047c4d-003e-4668-9b96-945eab34ab68","Type":"ContainerDied","Data":"0da04a93955c8df3b7b7c5985c028b011790f5735ee77e2295ce70d645e57ab5"} Dec 06 16:05:49 crc kubenswrapper[5003]: I1206 16:05:49.161846 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w25db" event={"ID":"1a047c4d-003e-4668-9b96-945eab34ab68","Type":"ContainerStarted","Data":"b48c3f090ed4b7f4c045dc5116a917265771b1227b1b711ce5ed4904201138ad"} Dec 06 16:05:49 crc kubenswrapper[5003]: I1206 16:05:49.161860 5003 scope.go:117] "RemoveContainer" containerID="5cab9da0a1a459e6915d570312c7512355a1b9744939c5aec7b823b431cc45d8" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515115052210024435 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015115052210017352 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015115045710016504 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015115045711015455 5ustar corecore